Mirror of https://github.com/github/codeql-action.git
Merge branch 'main' into report-action-aborted
@@ -4,17 +4,17 @@ import * as analysisPaths from './analysis-paths';
import * as configUtils from './config-utils';

test("emptyPaths", async t => {
    let config = new configUtils.Config();
    analysisPaths.includeAndExcludeAnalysisPaths(config, []);
    t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
    t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
});

test("nonEmptyPaths", async t => {
    let config = new configUtils.Config();
    config.paths.push('path1', 'path2');
    config.pathsIgnore.push('path3', 'path4');
    analysisPaths.includeAndExcludeAnalysisPaths(config, []);
    t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
    t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path3\npath4');
});
@@ -3,21 +3,21 @@ import * as core from '@actions/core';
import * as configUtils from './config-utils';

export function includeAndExcludeAnalysisPaths(config: configUtils.Config, languages: string[]) {
    if (config.paths.length !== 0) {
        core.exportVariable('LGTM_INDEX_INCLUDE', config.paths.join('\n'));
    }

    if (config.pathsIgnore.length !== 0) {
        core.exportVariable('LGTM_INDEX_EXCLUDE', config.pathsIgnore.join('\n'));
    }

    function isInterpretedLanguage(language): boolean {
        return language === 'javascript' || language === 'python';
    }

    // Index include/exclude only works in JavaScript and Python.
    // If some other language is detected/configured, show a warning.
    if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) && !languages.every(isInterpretedLanguage)) {
        core.warning('The "paths"/"paths-ignore" fields of the config only have effect for JavaScript and Python');
    }
}
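A side note on the warning condition above: Array.prototype.every is vacuously true for an empty array, so a run with an empty languages list never triggers the warning. A minimal sketch of the three cases (values illustrative, semantics standard JavaScript):

// Sketch: how languages.every(isInterpretedLanguage) gates the warning.
const isInterpreted = (language: string) => language === 'javascript' || language === 'python';
console.log([].every(isInterpreted));                       // true  -> no warning
console.log(['javascript', 'python'].every(isInterpreted)); // true  -> no warning
console.log(['javascript', 'cpp'].every(isInterpreted));    // false -> warning fires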
@@ -14,14 +14,14 @@ const PATHS_IGNORE_PROPERTY = 'paths-ignore';
const PATHS_PROPERTY = 'paths';

export class ExternalQuery {
    public repository: string;
    public ref: string;
    public path = '';

    constructor(repository: string, ref: string) {
        this.repository = repository;
        this.ref = ref;
    }
}

// The set of acceptable values for built-in suites from the codeql bundle
@@ -30,255 +30,255 @@ const builtinSuites = ['security-extended', 'security-and-quality'] as const;
type BuiltInSuite = typeof builtinSuites[number];

export class Config {
    public name = "";
    public disableDefaultQueries = false;
    public additionalQueries: string[] = [];
    public externalQueries: ExternalQuery[] = [];
    public additionalSuites: BuiltInSuite[] = [];
    public pathsIgnore: string[] = [];
    public paths: string[] = [];

    public addQuery(configFile: string, queryUses: string) {
        // The logic for parsing the string is based on what actions does for
        // parsing the 'uses' actions in the workflow file
        queryUses = queryUses.trim();
        if (queryUses === "") {
            throw new Error(getQueryUsesInvalid(configFile));
        }

        // Check for the local path case before we start trying to parse the repository name
        if (queryUses.startsWith("./")) {
            const localQueryPath = queryUses.slice(2);
            // Resolve the local path against the workspace so that when this is
            // passed to codeql it resolves to exactly the path we expect it to resolve to.
            const workspacePath = fs.realpathSync(util.getRequiredEnvParam('GITHUB_WORKSPACE'));
            let absoluteQueryPath = path.join(workspacePath, localQueryPath);

            // Check the file exists
            if (!fs.existsSync(absoluteQueryPath)) {
                throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
            }

            // Call this after checking the file exists, because it'll fail if the file doesn't exist
            absoluteQueryPath = fs.realpathSync(absoluteQueryPath);

            // Check the local path doesn't jump outside the repo using '..' or symlinks
            if (!(absoluteQueryPath + path.sep).startsWith(workspacePath + path.sep)) {
                throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
            }

            this.additionalQueries.push(absoluteQueryPath);
            return;
        }

        // Check for one of the builtin suites
        if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
            const suite = builtinSuites.find((suite) => suite === queryUses);
            if (suite) {
                this.additionalSuites.push(suite);
                return;
            } else {
                throw new Error(getQueryUsesInvalid(configFile, queryUses));
            }
        }

        let tok = queryUses.split('@');
        if (tok.length !== 2) {
            throw new Error(getQueryUsesInvalid(configFile, queryUses));
        }

        const ref = tok[1];
        tok = tok[0].split('/');
        // The first token is the owner,
        // the second token is the repo,
        // and the rest is a path. If there is more than one token remaining,
        // combine them to form the full path.
        if (tok.length < 2) {
            throw new Error(getQueryUsesInvalid(configFile, queryUses));
        }
        if (tok.length > 3) {
            tok = [tok[0], tok[1], tok.slice(2).join('/')];
        }

        // Check none of the parts of the repository name are empty
        if (tok[0].trim() === '' || tok[1].trim() === '') {
            throw new Error(getQueryUsesInvalid(configFile, queryUses));
        }

        let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
        if (tok.length === 3) {
            external.path = tok[2];
        }
        this.externalQueries.push(external);
    }
}

export function getNameInvalid(configFile: string): string {
    return getConfigFilePropertyError(configFile, NAME_PROPERTY, 'must be a non-empty string');
}

export function getDisableDefaultQueriesInvalid(configFile: string): string {
    return getConfigFilePropertyError(configFile, DISPLAY_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
}

export function getQueriesInvalid(configFile: string): string {
    return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, 'must be an array');
}

export function getQueryUsesInvalid(configFile: string, queryUses?: string): string {
    return getConfigFilePropertyError(
        configFile,
        QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY,
        'must be a built-in suite (' + builtinSuites.join(' or ') +
        '), a relative path, or be of the form "owner/repo[/path]@ref"' +
        (queryUses !== undefined ? '\n Found: ' + queryUses : ''));
}

export function getPathsIgnoreInvalid(configFile: string): string {
    return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, 'must be an array of non-empty strings');
}

export function getPathsInvalid(configFile: string): string {
    return getConfigFilePropertyError(configFile, PATHS_PROPERTY, 'must be an array of non-empty strings');
}

export function getLocalPathOutsideOfRepository(configFile: string, localPath: string): string {
    return getConfigFilePropertyError(
        configFile,
        QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY,
        'is invalid as the local path "' + localPath + '" is outside of the repository');
}

export function getLocalPathDoesNotExist(configFile: string, localPath: string): string {
    return getConfigFilePropertyError(
        configFile,
        QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY,
        'is invalid as the local path "' + localPath + '" does not exist in the repository');
}

export function getConfigFileOutsideWorkspaceErrorMessage(configFile: string): string {
    return 'The configuration file "' + configFile + '" is outside of the workspace';
}

export function getConfigFileDoesNotExistErrorMessage(configFile: string): string {
    return 'The configuration file "' + configFile + '" does not exist';
}

function getConfigFilePropertyError(configFile: string, property: string, error: string): string {
    return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
}

function initConfig(): Config {
    let configFile = core.getInput('config-file');

    const config = new Config();

    // If no config file was provided, create an empty one
    if (configFile === '') {
        core.debug('No configuration file was provided');
        return config;
    }

    // Treat the config file as relative to the workspace
    const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
    configFile = path.resolve(workspacePath, configFile);

    // Error if the config file is now outside of the workspace
    if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
        throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
    }

    // Error if the file does not exist
    if (!fs.existsSync(configFile)) {
        throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
    }

    const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));

    if (NAME_PROPERTY in parsedYAML) {
        if (typeof parsedYAML[NAME_PROPERTY] !== "string") {
            throw new Error(getNameInvalid(configFile));
        }
        if (parsedYAML[NAME_PROPERTY].length === 0) {
            throw new Error(getNameInvalid(configFile));
        }
        config.name = parsedYAML[NAME_PROPERTY];
    }

    if (DISPLAY_DEFAULT_QUERIES_PROPERTY in parsedYAML) {
        if (typeof parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
            throw new Error(getDisableDefaultQueriesInvalid(configFile));
        }
        config.disableDefaultQueries = parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY];
    }

    if (QUERIES_PROPERTY in parsedYAML) {
        if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
            throw new Error(getQueriesInvalid(configFile));
        }
        parsedYAML[QUERIES_PROPERTY].forEach(query => {
            if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
                throw new Error(getQueryUsesInvalid(configFile));
            }
            config.addQuery(configFile, query[QUERIES_USES_PROPERTY]);
        });
    }

    if (PATHS_IGNORE_PROPERTY in parsedYAML) {
        if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
            throw new Error(getPathsIgnoreInvalid(configFile));
        }
        parsedYAML[PATHS_IGNORE_PROPERTY].forEach(path => {
            if (typeof path !== "string" || path === '') {
                throw new Error(getPathsIgnoreInvalid(configFile));
            }
            config.pathsIgnore.push(path);
        });
    }

    if (PATHS_PROPERTY in parsedYAML) {
        if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
            throw new Error(getPathsInvalid(configFile));
        }
        parsedYAML[PATHS_PROPERTY].forEach(path => {
            if (typeof path !== "string" || path === '') {
                throw new Error(getPathsInvalid(configFile));
            }
            config.paths.push(path);
        });
    }

    return config;
}

function getConfigFolder(): string {
    return util.getRequiredEnvParam('RUNNER_TEMP');
}

export function getConfigFile(): string {
    return path.join(getConfigFolder(), 'config');
}

async function saveConfig(config: Config) {
    const configString = JSON.stringify(config);
    await io.mkdirP(getConfigFolder());
    fs.writeFileSync(getConfigFile(), configString, 'utf8');
    core.debug('Saved config:');
    core.debug(configString);
}

export async function loadConfig(): Promise<Config> {
    const configFile = getConfigFile();
    if (fs.existsSync(configFile)) {
        const configString = fs.readFileSync(configFile, 'utf8');
        core.debug('Loaded config:');
        core.debug(configString);
        return JSON.parse(configString);

    } else {
        const config = initConfig();
        core.debug('Initialized config:');
        core.debug(JSON.stringify(config));
        await saveConfig(config);
        return config;
    }
}
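For orientation, a hedged sketch of how the three accepted forms of a `uses` value land in a Config instance (the config file name and query references below are illustrative, not taken from the source):

// Illustrative only: the three routes through addQuery.
const config = new Config();
// 1. A built-in suite name (no '/' and no '@') is appended to additionalSuites:
config.addQuery('codeql-config.yml', 'security-extended');
// 2. A local path starting with './' is resolved against GITHUB_WORKSPACE and,
//    if it exists inside the repository, appended to additionalQueries:
//    config.addQuery('codeql-config.yml', './queries/my-query.ql');
// 3. An "owner/repo[/path]@ref" reference becomes an ExternalQuery:
config.addQuery('codeql-config.yml', 'github/codeql-go/ql/src@main');
// -> config.externalQueries[0].repository === 'github/codeql-go'
// -> config.externalQueries[0].path       === 'ql/src'
// -> config.externalQueries[0].ref        === 'main'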
@@ -7,16 +7,16 @@ import * as externalQueries from "./external-queries";
import * as util from "./util";

test("checkoutExternalQueries", async t => {
    let config = new configUtils.Config();
    config.externalQueries = [
        new configUtils.ExternalQuery("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b"),
    ];

    await util.withTmpDir(async tmpDir => {
        process.env["RUNNER_TEMP"] = tmpDir;
        await externalQueries.checkoutExternalQueries(config);

        // COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in master
        t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT")));
    });
});
@@ -6,185 +6,185 @@ import * as path from 'path';
import * as fingerprints from './fingerprints';

function testHash(t: ava.Assertions, input: string, expectedHashes: string[]) {
    let index = 0;
    let callback = function (lineNumber: number, hash: string) {
        t.is(lineNumber, index + 1);
        t.is(hash, expectedHashes[index]);
        index++;
    };
    fingerprints.hash(callback, input);
    t.is(index, input.split(/\r\n|\r|\n/).length);
}

test('hash', (t: ava.Assertions) => {
    // Try empty file
    testHash(t, "", ["c129715d7a2bc9a3:1"]);

    // Try various combinations of newline characters
    testHash(
        t,
        " a\nb\n \t\tc\n d",
        [
            "271789c17abda88f:1",
            "54703d4cd895b18:1",
            "180aee12dab6264:1",
            "a23a3dc5e078b07b:1"
        ]);
    testHash(
        t,
        " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End",
        [
            "8b7cf3e952e7aeb2:1",
            "b1ae1287ec4718d9:1",
            "bff680108adb0fcc:1",
            "c6805c5e1288b612:1",
            "b86d3392aea1be30:1",
            "e6ceba753e1a442:1",
        ]);
    testHash(
        t,
        " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End\n",
        [
            "e9496ae3ebfced30:1",
            "fb7c023a8b9ccb3f:1",
            "ce8ba1a563dcdaca:1",
            "e20e36e16fcb0cc8:1",
            "b3edc88f2938467e:1",
            "c8e28b0b4002a3a0:1",
            "c129715d7a2bc9a3:1",
        ]);
    testHash(
        t,
        " hello; \t\nworld!!!\r\r\r \t\tGreetings\r End\r",
        [
            "e9496ae3ebfced30:1",
            "fb7c023a8b9ccb3f:1",
            "ce8ba1a563dcdaca:1",
            "e20e36e16fcb0cc8:1",
            "b3edc88f2938467e:1",
            "c8e28b0b4002a3a0:1",
            "c129715d7a2bc9a3:1",
        ]);
    testHash(
        t,
        " hello; \t\r\nworld!!!\r\n\r\n\r\n \t\tGreetings\r\n End\r\n",
        [
            "e9496ae3ebfced30:1",
            "fb7c023a8b9ccb3f:1",
            "ce8ba1a563dcdaca:1",
            "e20e36e16fcb0cc8:1",
            "b3edc88f2938467e:1",
            "c8e28b0b4002a3a0:1",
            "c129715d7a2bc9a3:1",
        ]);
    testHash(
        t,
        " hello; \t\nworld!!!\r\n\n\r \t\tGreetings\r End\r\n",
        [
            "e9496ae3ebfced30:1",
            "fb7c023a8b9ccb3f:1",
            "ce8ba1a563dcdaca:1",
            "e20e36e16fcb0cc8:1",
            "b3edc88f2938467e:1",
            "c8e28b0b4002a3a0:1",
            "c129715d7a2bc9a3:1",
        ]);

    // Try a repeating line that will generate identical hashes
    testHash(
        t,
        "Lorem ipsum dolor sit amet.\n".repeat(10),
        [
            "a7f2ff13bc495cf2:1",
            "a7f2ff13bc495cf2:2",
            "a7f2ff13bc495cf2:3",
            "a7f2ff13bc495cf2:4",
            "a7f2ff13bc495cf2:5",
            "a7f2ff13bc495cf2:6",
            "a7f2ff1481e87703:1",
            "a9cf91f7bbf1862b:1",
            "55ec222b86bcae53:1",
            "cc97dc7b1d7d8f7b:1",
            "c129715d7a2bc9a3:1"
        ]);
});

function testResolveUriToFile(uri: any, index: any, artifactsURIs: any[]) {
    const location = { "uri": uri, "index": index };
    const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
    return fingerprints.resolveUriToFile(location, artifacts);
}

test('resolveUriToFile', t => {
    // The resolveUriToFile method checks that the file exists and is in the right directory,
    // so we need to give it real files to look at. We will use this file as an example.
    // For this to work we require the current working directory to be a parent, but this
    // should generally always be the case, so this is fine.
    const cwd = process.cwd();
    const filepath = __filename;
    t.true(filepath.startsWith(cwd + '/'));
    const relativeFilepath = filepath.substring(cwd.length + 1);

    process.env['GITHUB_WORKSPACE'] = cwd;

    // Absolute paths are unmodified
    t.is(testResolveUriToFile(filepath, undefined, []), filepath);
    t.is(testResolveUriToFile('file://' + filepath, undefined, []), filepath);

    // Relative paths are made absolute
    t.is(testResolveUriToFile(relativeFilepath, undefined, []), filepath);
    t.is(testResolveUriToFile('file://' + relativeFilepath, undefined, []), filepath);

    // Absolute paths outside the src root are discarded
    t.is(testResolveUriToFile('/src/foo/bar.js', undefined, []), undefined);
    t.is(testResolveUriToFile('file:///src/foo/bar.js', undefined, []), undefined);

    // Other schemes are discarded
    t.is(testResolveUriToFile('https://' + filepath, undefined, []), undefined);
    t.is(testResolveUriToFile('ftp://' + filepath, undefined, []), undefined);

    // Invalid URIs are discarded
    t.is(testResolveUriToFile(1, undefined, []), undefined);
    t.is(testResolveUriToFile(undefined, undefined, []), undefined);

    // Non-existent files are discarded
    t.is(testResolveUriToFile(filepath + '2', undefined, []), undefined);

    // Index is resolved
    t.is(testResolveUriToFile(undefined, 0, [filepath]), filepath);
    t.is(testResolveUriToFile(undefined, 1, ['foo', filepath]), filepath);

    // Invalid indexes are discarded
    t.is(testResolveUriToFile(undefined, 1, [filepath]), undefined);
    t.is(testResolveUriToFile(undefined, '0', [filepath]), undefined);
});

test('addFingerprints', t => {
    // Run an end-to-end test on a test file
    let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.input.sarif').toString();
    let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.expected.sarif').toString();

    // The test files are stored prettified, but addFingerprints outputs condensed JSON
    input = JSON.stringify(JSON.parse(input));
    expected = JSON.stringify(JSON.parse(expected));

    // The URIs in the SARIF files resolve to files in the testdata directory
    process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');

    t.deepEqual(fingerprints.addFingerprints(input), expected);
});

test('missingRegions', t => {
    // Run an end-to-end test on a test file
    let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.input.sarif').toString();
    let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.expected.sarif').toString();

    // The test files are stored prettified, but addFingerprints outputs condensed JSON
    input = JSON.stringify(JSON.parse(input));
    expected = JSON.stringify(JSON.parse(expected));

    // The URIs in the SARIF files resolve to files in the testdata directory
    process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');

    t.deepEqual(fingerprints.addFingerprints(input), expected);
});
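The expected values in these tests take the form "<hex hash>:<occurrence>": identical lines produce the same hex part and are told apart by the counter, as the repeated Lorem-ipsum line above shows. A small sketch of that split (value illustrative):

// Each expected fingerprint splits into a line hash and an occurrence counter:
const [hexHash, occurrence] = "a7f2ff13bc495cf2:3".split(':');
// hexHash    === 'a7f2ff13bc495cf2'  (shared by every identical copy of the line)
// occurrence === '3'                 (the third time this line hash was seen)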
@@ -11,11 +11,11 @@ const MOD = Long.fromInt(37); // L

// Compute the starting point for the hash mod
function computeFirstMod(): Long {
    let firstMod = Long.ONE; // L
    for (let i = 0; i < BLOCK_SIZE; i++) {
        firstMod = firstMod.multiply(MOD);
    }
    return firstMod;
}
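The loop above raises MOD to the power BLOCK_SIZE; in the rolling hash below, that factor is what removes the contribution of the character leaving the window, Rabin-Karp style. A hedged arithmetic sketch with plain numbers and a tiny window in place of the real Long values (BLOCK_SIZE assumed to be 3 purely for illustration):

// Illustrative only: the hash of window [c0, c1, c2] is c0*M^2 + c1*M + c2.
// Appending c3 and dropping c0: newHash = M*oldHash + c3 - firstMod*c0,
// where firstMod = M^3 is what computeFirstMod() produces for BLOCK_SIZE = 3.
const M = 37;
const firstMod = M * M * M;
const [c0, c1, c2, c3] = [10, 11, 12, 13];
const oldHash = c0 * M * M + c1 * M + c2;
const newHash = M * oldHash + c3 - firstMod * c0;
console.log(newHash === c1 * M * M + c2 * M + c3); // true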
// Type signature of callback passed to hash function.
@@ -35,125 +35,125 @@ type hashCallback = (lineNumber: number, hash: string) => void;
 * @param input The file's contents
 */
export function hash(callback: hashCallback, input: string) {
    // A rolling view into the input
    const window = Array(BLOCK_SIZE).fill(0);

    // If the character in the window is the start of a new line
    // then this records the line number, otherwise it will be -1.
    // Indexes match up with those of the window variable.
    const lineNumbers = Array(BLOCK_SIZE).fill(-1);

    // The current hash value, updated as we read each character
    let hash = Long.ZERO;
    let firstMod = computeFirstMod();

    // The current index in the window; wraps around to zero when we reach BLOCK_SIZE
    let index = 0;
    // The line number of the character we are currently processing from the input
    let lineNumber = 0;
    // Is the next character to be read the start of a new line
    let lineStart = true;
    // Was the previous character a CR (carriage return)
    let prevCR = false;
    // A map of hashes we've seen before and how many times,
    // so we can disambiguate identical hashes
    const hashCounts: { [hashValue: string]: number } = {};

    // Output the current hash and line number to the callback function
    const outputHash = function () {
        let hashValue = hash.toUnsigned().toString(16);
        if (!hashCounts[hashValue]) {
            hashCounts[hashValue] = 0;
        }
        hashCounts[hashValue]++;
        callback(lineNumbers[index], hashValue + ":" + hashCounts[hashValue]);
        lineNumbers[index] = -1;
    };

    // Update the current hash value and increment the index in the window
    const updateHash = function (current: number) {
        const begin = window[index];
        window[index] = current;
        hash = MOD.multiply(hash)
            .add(Long.fromInt(current))
            .subtract(firstMod.multiply(Long.fromInt(begin)));

        index = (index + 1) % BLOCK_SIZE;
    };

    // First process every character in the input, updating the hash and lineNumbers
    // as we go. Once we reach a point in the window again then we've processed
    // BLOCK_SIZE characters, and if the last character at this point in the window
    // was the start of a line then we should output the hash for that line.
    for (let i = 0, len = input.length; i <= len; i++) {
        let current = i === len ? 65535 : input.charCodeAt(i);
        // skip tabs, spaces, and line feeds that come directly after a carriage return
        if (current === space || current === tab || (prevCR && current === lf)) {
            prevCR = false;
            continue;
        }
        // replace CR with LF
        if (current === cr) {
            current = lf;
            prevCR = true;
        } else {
            prevCR = false;
        }
        if (lineNumbers[index] !== -1) {
            outputHash();
        }
        if (lineStart) {
            lineStart = false;
            lineNumber++;
            lineNumbers[index] = lineNumber;
        }
        if (current === lf) {
            lineStart = true;
        }
        updateHash(current);
    }

    // Flush the remaining lines
    for (let i = 0; i < BLOCK_SIZE; i++) {
        if (lineNumbers[index] !== -1) {
            outputHash();
        }
        updateHash(0);
    }
}

// Generate a hash callback function that updates the given result in-place
// when it receives a hash for the correct line number. Ignores hashes for other lines.
function locationUpdateCallback(result: any, location: any): hashCallback {
    let locationStartLine = location.physicalLocation?.region?.startLine;
    if (locationStartLine === undefined) {
        // We expect the region section to be present, but it can be absent if the
        // alert pertains to the entire file. In this case, we compute the fingerprint
        // using the hash of the first line of the file.
        locationStartLine = 1;
    }
    return function (lineNumber: number, hash: string) {
        // Ignore hashes for lines that don't concern us
        if (locationStartLine !== lineNumber) {
            return;
        }

        if (!result.partialFingerprints) {
            result.partialFingerprints = {};
        }
        const existingFingerprint = result.partialFingerprints.primaryLocationLineHash;

        // If the hash doesn't match the existing fingerprint then
        // output a warning and don't overwrite it.
        if (!existingFingerprint) {
            result.partialFingerprints.primaryLocationLineHash = hash;
        } else if (existingFingerprint !== hash) {
            core.warning("Calculated fingerprint of " + hash +
                " for file " + location.physicalLocation.artifactLocation.uri +
                " line " + lineNumber +
                ", but found existing inconsistent fingerprint value " + existingFingerprint);
        }
    };
}
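A minimal usage sketch of the hash function above (the input string is illustrative, and no concrete hash values are asserted):

// Collect each line's fingerprint from an in-memory string.
const hashesByLine: { [line: number]: string } = {};
hash((lineNumber, h) => { hashesByLine[lineNumber] = h; }, "first\nsecond\nthird\n");
// hashesByLine now maps line numbers 1-4 to "hex:count" strings
// (the trailing newline yields a final empty line, matching the tests above).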
// Can we fingerprint the given location. This requires access to
@@ -161,100 +161,100 @@ function locationUpdateCallback(result: any, location: any): hashCallback {
// If possible returns an absolute file path for the source file,
// or if not possible then returns undefined.
export function resolveUriToFile(location: any, artifacts: any[]): string | undefined {
    // This may be referencing an artifact
    if (!location.uri && location.index !== undefined) {
        if (typeof location.index !== 'number' ||
            location.index < 0 ||
            location.index >= artifacts.length ||
            typeof artifacts[location.index].location !== 'object') {
            core.debug('Ignoring location as index "' + location.index + '" is invalid');
            return undefined;
        }
        location = artifacts[location.index].location;
    }

    // Get the URI and decode
    if (typeof location.uri !== 'string') {
        core.debug('Ignoring location as uri "' + location.uri + '" is invalid');
        return undefined;
    }
    let uri = decodeURIComponent(location.uri);

    // Remove a file scheme, and abort if the scheme is anything else
    const fileUriPrefix = 'file://';
    if (uri.startsWith(fileUriPrefix)) {
        uri = uri.substring(fileUriPrefix.length);
    }
    if (uri.indexOf('://') !== -1) {
        core.debug('Ignoring location URI "' + uri + '" as the scheme is not recognised');
        return undefined;
    }

    // Discard any absolute paths that aren't in the src root
    const srcRootPrefix = process.env['GITHUB_WORKSPACE'] + '/';
    if (uri.startsWith('/') && !uri.startsWith(srcRootPrefix)) {
        core.debug('Ignoring location URI "' + uri + '" as it is outside of the src root');
        return undefined;
    }

    // Just assume a relative path is relative to the src root.
    // This is not necessarily true but should be a good approximation
    // and here we likely want to err on the side of handling more cases.
    if (!uri.startsWith('/')) {
        uri = srcRootPrefix + uri;
    }

    // Check the file exists
    if (!fs.existsSync(uri)) {
        core.debug("Unable to compute fingerprint for non-existent file: " + uri);
        return undefined;
    }

    return uri;
}

// Compute fingerprints for results in the given sarif file
// and return an updated sarif file contents.
export function addFingerprints(sarifContents: string): string {
    let sarif = JSON.parse(sarifContents);

    // Gather together results for the same file and construct
    // callbacks to accept hashes for that file and update the location
    const callbacksByFile: { [filename: string]: hashCallback[] } = {};
    for (const run of sarif.runs || []) {
        // We may need the list of artifacts to resolve against
        let artifacts = run.artifacts || [];

        for (const result of run.results || []) {
            // Check the primary location is defined correctly and is in the src root
            const primaryLocation = (result.locations || [])[0];
            if (!primaryLocation ||
                !primaryLocation.physicalLocation ||
                !primaryLocation.physicalLocation.artifactLocation) {
                core.debug("Unable to compute fingerprint for invalid location: " + JSON.stringify(primaryLocation));
                continue;
            }

            const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts);
            if (!filepath) {
                continue;
            }
            if (!callbacksByFile[filepath]) {
                callbacksByFile[filepath] = [];
            }
            callbacksByFile[filepath].push(locationUpdateCallback(result, primaryLocation));
        }
    }

    // Now hash each file that was found
    Object.entries(callbacksByFile).forEach(([filepath, callbacks]) => {
        // A callback that forwards the hash to all other callbacks for that file
        const teeCallback = function (lineNumber: number, hash: string) {
            Object.values(callbacks).forEach(c => c(lineNumber, hash));
        };
        const fileContents = fs.readFileSync(filepath).toString();
        hash(teeCallback, fileContents);
    });

    return JSON.stringify(sarif);
}
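For orientation, a hedged sketch of the minimal SARIF shape addFingerprints walks (field names follow the SARIF structure used above; file names and values are made up):

// Illustrative minimal input for addFingerprints:
const sarif = {
    runs: [{
        artifacts: [{ location: { uri: "src/main.ts" } }],
        results: [{
            locations: [{
                physicalLocation: {
                    artifactLocation: { index: 0 },   // resolved via run.artifacts
                    region: { startLine: 3 },
                },
            }],
        }],
    }],
};
// addFingerprints(JSON.stringify(sarif)) would add, per result:
//   result.partialFingerprints.primaryLocationLineHash = "<hash>:<count>"
// provided src/main.ts exists under GITHUB_WORKSPACE.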
@@ -8,55 +8,55 @@ import * as util from './util';

test('download codeql bundle cache', async t => {

    await util.withTmpDir(async tmpDir => {

        process.env['GITHUB_WORKSPACE'] = tmpDir;

        process.env['RUNNER_TEMP'] = path.join(tmpDir, 'temp');
        process.env['RUNNER_TOOL_CACHE'] = path.join(tmpDir, 'cache');

        const versions = ['20200601', '20200610'];

        for (let i = 0; i < versions.length; i++) {
            const version = versions[i];

            nock('https://example.com')
                .get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
                .replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));

            process.env['INPUT_TOOLS'] = `https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`;

            await setupTools.setupCodeQL();

            t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
        }

        const cachedVersions = toolcache.findAllVersions('CodeQL');

        t.is(cachedVersions.length, 2);
    });
});

test('parse codeql bundle url version', t => {

    const tests = {
        '20200601': '0.0.0-20200601',
        '20200601.0': '0.0.0-20200601.0',
        '20200601.0.0': '20200601.0.0',
        '1.2.3': '1.2.3',
        '1.2.3-alpha': '1.2.3-alpha',
        '1.2.3-beta.1': '1.2.3-beta.1',
    };

    for (const [version, expectedVersion] of Object.entries(tests)) {
        const url = `https://github.com/.../codeql-bundle-${version}/...`;

        try {
            const parsedVersion = setupTools.getCodeQLURLVersion(url);
            t.deepEqual(parsedVersion, expectedVersion);
        } catch (e) {
            t.fail(e.message);
        }
    }
});
@@ -4,70 +4,70 @@ import * as path from 'path';
import * as semver from 'semver';

export class CodeQLSetup {
  public dist: string;
  public tools: string;
  public cmd: string;
  public platform: string;

  constructor(codeqlDist: string) {
    this.dist = codeqlDist;
    this.tools = path.join(this.dist, 'tools');
    this.cmd = path.join(codeqlDist, 'codeql');
    // TODO check process.arch ?
    if (process.platform === 'win32') {
      this.platform = 'win64';
      if (this.cmd.endsWith('codeql')) {
        this.cmd += ".exe";
      }
    } else if (process.platform === 'linux') {
      this.platform = 'linux64';
    } else if (process.platform === 'darwin') {
      this.platform = 'osx64';
    } else {
      throw new Error("Unsupported platform: " + process.platform);
    }
  }
}

export async function setupCodeQL(): Promise<CodeQLSetup> {
  try {
    const codeqlURL = core.getInput('tools', { required: true });
    const codeqlURLVersion = getCodeQLURLVersion(codeqlURL);

    let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
    if (codeqlFolder) {
      core.debug(`CodeQL found in cache ${codeqlFolder}`);
    } else {
      const codeqlPath = await toolcache.downloadTool(codeqlURL);
      const codeqlExtracted = await toolcache.extractTar(codeqlPath);
      codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
    }
    return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));

  } catch (e) {
    core.error(e);
    throw new Error("Unable to download and extract CodeQL CLI");
  }
}

export function getCodeQLURLVersion(url: string): string {

  const match = url.match(/\/codeql-bundle-(.*)\//);
  if (match === null || match.length < 2) {
    throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
  }

  let version = match[1];

  if (!semver.valid(version)) {
    core.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
    version = '0.0.0-' + version;
  }

  const s = semver.clean(version);
  if (!s) {
    throw new Error(`Malformed tools url ${url}. Version should be in SemVer format but we have ${version} instead`);
  }

  return s;
}
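The coercion above exists because date-stamped bundle tags like `codeql-bundle-20200601` are not valid SemVer, but the tool cache keys on SemVer versions. Prefixing `0.0.0-` turns the date into a pre-release identifier that the semver package accepts. A minimal sketch of that conversion, assuming only the semver package's documented valid/clean behaviour:

import * as semver from 'semver';

// Sketch: coerce a bundle tag into a SemVer string usable as a
// tool-cache key. Illustrative helper, not the action's own function.
function toCacheVersion(tag: string): string {
  let version = tag;
  if (!semver.valid(version)) {
    version = '0.0.0-' + version;   // e.g. '20200601' -> '0.0.0-20200601'
  }
  const cleaned = semver.clean(version);
  if (!cleaned) {
    throw new Error(`Cannot derive a SemVer version from tag: ${tag}`);
  }
  return cleaned;
}

// toCacheVersion('20200601')     === '0.0.0-20200601'
// toCacheVersion('1.2.3-beta.1') === '1.2.3-beta.1'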
@@ -11,247 +11,255 @@ import * as sharedEnv from './shared-environment';
import * as util from './util';

type TracerConfig = {
  spec: string;
  env: { [key: string]: string };
};

const CRITICAL_TRACER_VARS = new Set(
  ['SEMMLE_PRELOAD_libtrace',
    'SEMMLE_RUNNER',
    'SEMMLE_COPY_EXECUTABLES_ROOT',
    'SEMMLE_DEPTRACE_SOCKET',
    'SEMMLE_JAVA_TOOL_OPTIONS'
  ]);

async function tracerConfig(
  codeql: setuptools.CodeQLSetup,
  database: string,
  compilerSpec?: string): Promise<TracerConfig> {

  const compilerSpecArg = compilerSpec ? ["--compiler-spec=" + compilerSpec] : [];

  let envFile = path.resolve(database, 'working', 'env.tmp');
  await exec.exec(codeql.cmd, ['database', 'trace-command', database,
    ...compilerSpecArg,
    process.execPath, path.resolve(__dirname, 'tracer-env.js'), envFile]);

  const env: { [key: string]: string } = JSON.parse(fs.readFileSync(envFile, 'utf-8'));

  const config = env['ODASA_TRACER_CONFIGURATION'];
  const info: TracerConfig = { spec: config, env: {} };

  // Extract critical tracer variables from the environment
  for (let entry of Object.entries(env)) {
    const key = entry[0];
    const value = entry[1];
    // skip ODASA_TRACER_CONFIGURATION as it is handled separately
    if (key === 'ODASA_TRACER_CONFIGURATION') {
      continue;
    }
    // skip undefined values
    if (typeof value === 'undefined') {
      continue;
    }
    // Keep variables that do not exist in the current environment. In addition,
    // always keep critical and CODEQL_ variables
    if (typeof process.env[key] === 'undefined' || CRITICAL_TRACER_VARS.has(key) || key.startsWith('CODEQL_')) {
      info.env[key] = value;
    }
  }
  return info;
}

function concatTracerConfigs(configs: { [lang: string]: TracerConfig }): TracerConfig {
  // A tracer config is a map containing additional environment variables and a tracer 'spec' file.
  // A tracer 'spec' file has the following format: [log_file, number_of_blocks, blocks_text]

  // Merge the environments
  const env: { [key: string]: string; } = {};
  let copyExecutables = false;
  let envSize = 0;
  for (let v of Object.values(configs)) {
    for (let e of Object.entries(v.env)) {
      const name = e[0];
      const value = e[1];
      // skip SEMMLE_COPY_EXECUTABLES_ROOT as it is handled separately
      if (name === 'SEMMLE_COPY_EXECUTABLES_ROOT') {
        copyExecutables = true;
      } else if (name in env) {
        if (env[name] !== value) {
          throw Error('Incompatible values in environment parameter ' +
            name + ': ' + env[name] + ' and ' + value);
        }
      } else {
        env[name] = value;
        envSize += 1;
      }
    }
  }

  // Concatenate spec files into a new spec file
  let languages = Object.keys(configs);
  const cppIndex = languages.indexOf('cpp');
  // Make sure cpp is the last language, if it's present, since it must be concatenated last
  if (cppIndex !== -1) {
    let lastLang = languages[languages.length - 1];
    languages[languages.length - 1] = languages[cppIndex];
    languages[cppIndex] = lastLang;
  }

  let totalLines: string[] = [];
  let totalCount = 0;
  for (let lang of languages) {
    const lines = fs.readFileSync(configs[lang].spec, 'utf8').split(/\r?\n/);
    const count = parseInt(lines[1], 10);
    totalCount += count;
    totalLines.push(...lines.slice(2));
  }

  const tempFolder = util.getRequiredEnvParam('RUNNER_TEMP');
  const newLogFilePath = path.resolve(tempFolder, 'compound-build-tracer.log');
  const spec = path.resolve(tempFolder, 'compound-spec');
  const compoundTempFolder = path.resolve(tempFolder, 'compound-temp');
  const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];

  if (copyExecutables) {
    env['SEMMLE_COPY_EXECUTABLES_ROOT'] = compoundTempFolder;
    envSize += 1;
  }

  fs.writeFileSync(spec, newSpecContent.join('\n'));

  // Prepare the content of the compound environment file
  let buffer = Buffer.alloc(4);
  buffer.writeInt32LE(envSize, 0);
  for (let e of Object.entries(env)) {
    const key = e[0];
    const value = e[1];
    const lineBuffer = Buffer.from(key + '=' + value + '\0', 'utf8');
    const sizeBuffer = Buffer.alloc(4);
    sizeBuffer.writeInt32LE(lineBuffer.length, 0);
    buffer = Buffer.concat([buffer, sizeBuffer, lineBuffer]);
  }
  // Write the compound environment
  const envPath = spec + '.environment';
  fs.writeFileSync(envPath, buffer);

  return { env, spec };
}
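The compound environment file written above is a simple length-prefixed binary format: a little-endian Int32 entry count, then for each `key=value\0` entry an Int32LE byte length followed by the UTF-8 bytes. The tracer is the real consumer; this hedged decoder sketch only illustrates the layout:

// Sketch: decode the compound environment format produced above.
// Layout (integers little-endian Int32):
//   [entryCount][len1][key1=value1\0][len2][key2=value2\0]...
function decodeCompoundEnv(buffer: Buffer): { [key: string]: string } {
  const env: { [key: string]: string } = {};
  const count = buffer.readInt32LE(0);
  let offset = 4;
  for (let i = 0; i < count; i++) {
    const len = buffer.readInt32LE(offset);
    offset += 4;
    // Drop the trailing NUL before splitting on the first '='.
    const entry = buffer.toString('utf8', offset, offset + len - 1);
    offset += len;
    const eq = entry.indexOf('=');
    env[entry.substring(0, eq)] = entry.substring(eq + 1);
  }
  return env;
}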
async function run() {

  let languages: string[];

  try {
    if (util.should_abort('init', false) || !await util.reportActionStarting('init')) {
      return;
    }

    core.startGroup('Load language configuration');

    const config = await configUtils.loadConfig();

    languages = await util.getLanguages();
    // If the languages parameter was not given and no languages were
    // detected then fail here as this is a workflow configuration error.
    if (languages.length === 0) {
      core.setFailed("Did not detect any languages to analyze. Please update input in workflow.");
      return;
    }

    analysisPaths.includeAndExcludeAnalysisPaths(config, languages);

    core.endGroup();

  } catch (e) {
    core.setFailed(e.message);
    await util.reportActionAborted('init', e.message);
    return;
  }

  try {

    const sourceRoot = path.resolve();

    core.startGroup('Setup CodeQL tools');
    const codeqlSetup = await setuptools.setupCodeQL();
    await exec.exec(codeqlSetup.cmd, ['version', '--format=json']);
    core.endGroup();

    // Forward Go flags
    const goFlags = process.env['GOFLAGS'];
    if (goFlags) {
      core.exportVariable('GOFLAGS', goFlags);
      core.warning("Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action.");
    }

    // Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
    const codeqlRam = process.env['CODEQL_RAM'] || '6500';
    core.exportVariable('CODEQL_RAM', codeqlRam);

    const databaseFolder = path.resolve(util.getRequiredEnvParam('RUNNER_TEMP'), 'codeql_databases');
    await io.mkdirP(databaseFolder);

    let tracedLanguages: { [key: string]: TracerConfig } = {};
    let scannedLanguages: string[] = [];

    // TODO: replace this code once CodeQL supports multi-language tracing
    for (let language of languages) {
      const languageDatabase = path.join(databaseFolder, language);

      // Init language database
      await exec.exec(codeqlSetup.cmd, [
        'database',
        'init',
        languageDatabase,
        '--language=' + language,
        '--source-root=' + sourceRoot,
      ]);
      // TODO: add better detection of 'traced languages' instead of using a hard coded list
      if (['cpp', 'java', 'csharp'].includes(language)) {
        const config: TracerConfig = await tracerConfig(codeqlSetup, languageDatabase);
        tracedLanguages[language] = config;
      } else {
        scannedLanguages.push(language);
      }
    }
    const tracedLanguageKeys = Object.keys(tracedLanguages);
    if (tracedLanguageKeys.length > 0) {
      const mainTracerConfig = concatTracerConfigs(tracedLanguages);
      if (mainTracerConfig.spec) {
        for (let entry of Object.entries(mainTracerConfig.env)) {
          core.exportVariable(entry[0], entry[1]);
        }

        core.exportVariable('ODASA_TRACER_CONFIGURATION', mainTracerConfig.spec);
        if (process.platform === 'darwin') {
          core.exportVariable(
            'DYLD_INSERT_LIBRARIES',
            path.join(codeqlSetup.tools, 'osx64', 'libtrace.dylib'));
        } else if (process.platform === 'win32') {
          await exec.exec(
            'powershell',
            [
              path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
              path.resolve(codeqlSetup.tools, 'win64', 'tracer.exe'),
            ],
            { env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
        } else {
          core.exportVariable('LD_PRELOAD', path.join(codeqlSetup.tools, 'linux64', '${LIB}trace.so'));
        }
      }
    }

    core.exportVariable(sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES, scannedLanguages.join(','));
    core.exportVariable(sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES, tracedLanguageKeys.join(','));

    // TODO: make this a "private" environment variable of the action
    core.exportVariable(sharedEnv.CODEQL_ACTION_DATABASE_DIR, databaseFolder);
    core.exportVariable(sharedEnv.CODEQL_ACTION_CMD, codeqlSetup.cmd);

  } catch (error) {
    core.setFailed(error.message);
    await util.reportActionFailed('init', error.message, error.stack);
    return;
  }
  await util.reportActionSucceeded('init');
  core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
}

run().catch(e => {
  core.setFailed("init action failed: " + e);
  console.log(e);
});
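The platform switch above is the heart of build tracing: each OS has a different mechanism for injecting the tracer into subsequently spawned build processes. A hedged sketch of just that decision, with the same directory layout used above treated as an assumption:

import * as path from 'path';

// Sketch: pick the platform-specific preload variable for tracer injection.
function tracerInjectionEnv(toolsDir: string): { [key: string]: string } {
  switch (process.platform) {
    case 'darwin':
      return { DYLD_INSERT_LIBRARIES: path.join(toolsDir, 'osx64', 'libtrace.dylib') };
    case 'linux':
      // '${LIB}' is expanded by the dynamic linker itself (e.g. to 'lib64'),
      // so it is passed through literally rather than interpolated here.
      return { LD_PRELOAD: path.join(toolsDir, 'linux64', '${LIB}trace.so') };
    default:
      // Windows has no preload variable; a separate injection step is needed,
      // which is why the action runs a PowerShell script there instead.
      return {};
  }
}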
@@ -2,12 +2,11 @@ import * as fs from 'fs';

const env = {};
for (let entry of Object.entries(process.env)) {
  const key = entry[0];
  const value = entry[1];
  if (typeof value !== 'undefined' && key !== '_' && !key.startsWith('JAVA_MAIN_CLASS_')) {
    env[key] = value;
  }
}
process.stdout.write(process.argv[2]);
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');
@@ -14,212 +14,212 @@ import * as util from './util';

// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
export function combineSarifFiles(sarifFiles: string[]): string {
  let combinedSarif = {
    version: null,
    runs: [] as any[]
  };

  for (let sarifFile of sarifFiles) {
    let sarifObject = JSON.parse(fs.readFileSync(sarifFile, 'utf8'));
    // Check SARIF version
    if (combinedSarif.version === null) {
      combinedSarif.version = sarifObject.version;
    } else if (combinedSarif.version !== sarifObject.version) {
      throw "Different SARIF versions encountered: " + combinedSarif.version + " and " + sarifObject.version;
    }

    combinedSarif.runs.push(...sarifObject.runs);
  }

  return JSON.stringify(combinedSarif);
}

// Upload the given payload.
// If the request fails then this will retry a small number of times.
async function uploadPayload(payload): Promise<boolean> {
  core.info('Uploading results');

  // If in test mode we don't want to upload the results
  const testMode = process.env['TEST_MODE'] === 'true' || false;
  if (testMode) {
    return true;
  }

  const githubToken = core.getInput('token');
  const ph: auth.BearerCredentialHandler = new auth.BearerCredentialHandler(githubToken);
  const client = new http.HttpClient('Code Scanning : Upload SARIF', [ph]);
  const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY'] + '/code-scanning/analysis';

  // Make up to 4 attempts to upload, and sleep for these
  // numbers of seconds between each attempt.
  // We don't want to back off too much to avoid wasting action
  // minutes, but just waiting a little bit could maybe help.
  const backoffPeriods = [1, 5, 15];

  for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {

    const res: http.HttpClientResponse = await client.put(url, payload);
    core.debug('response status: ' + res.message.statusCode);

    const statusCode = res.message.statusCode;
    if (statusCode === 202) {
      core.info("Successfully uploaded results");
      return true;
    }

    const requestID = res.message.headers["x-github-request-id"];

    // On any other status code that's not 5xx mark the upload as failed
    if (!statusCode || statusCode < 500 || statusCode >= 600) {
      core.setFailed('Upload failed (' + requestID + '): (' + statusCode + ') ' + await res.readBody());
      return false;
    }

    // On a 5xx status code we may retry the request
    if (attempt < backoffPeriods.length) {
      // Log the failure as a warning but don't mark the action as failed yet
      core.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
        ') failed (' + requestID + '). Retrying in ' + backoffPeriods[attempt] +
        ' seconds: (' + statusCode + ') ' + await res.readBody());
      // Sleep for the backoff period
      await new Promise(r => setTimeout(r, backoffPeriods[attempt] * 1000));
      continue;

    } else {
      // If the upload fails with 5xx then we assume it is a temporary problem
      // and not an error that the user has caused or can fix.
      // We avoid marking the job as failed to avoid breaking CI workflows.
      core.error('Upload failed (' + requestID + '): (' + statusCode + ') ' + await res.readBody());
      return false;
    }
  }

  return false;
}
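The retry loop above follows a common bounded-backoff shape: a fixed sleep schedule gives schedule.length + 1 total attempts, retrying only on transient (5xx) outcomes. A generic hedged sketch of that pattern, with illustrative names:

// Sketch: bounded retry with a fixed backoff schedule, factored as a helper.
async function withRetries<T>(
  schedule: number[],                       // seconds between attempts, e.g. [1, 5, 15]
  attemptFn: () => Promise<T>,
  isRetryable: (result: T) => boolean): Promise<T> {
  for (let attempt = 0; ; attempt++) {
    const result = await attemptFn();
    if (!isRetryable(result) || attempt >= schedule.length) {
      return result;                        // success, hard failure, or out of retries
    }
    await new Promise(r => setTimeout(r, schedule[attempt] * 1000));
  }
}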
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
export async function upload(input: string): Promise<boolean> {
  if (fs.lstatSync(input).isDirectory()) {
    const sarifFiles = fs.readdirSync(input)
      .filter(f => f.endsWith(".sarif"))
      .map(f => path.resolve(input, f));
    if (sarifFiles.length === 0) {
      core.setFailed("No SARIF files found to upload in \"" + input + "\".");
      return false;
    }
    return await uploadFiles(sarifFiles);
  } else {
    return await uploadFiles([input]);
  }
}

// Counts the number of results in the given SARIF file
export function countResultsInSarif(sarif: string): number {
  let numResults = 0;
  for (const run of JSON.parse(sarif).runs) {
    numResults += run.results.length;
  }
  return numResults;
}
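countResultsInSarif relies only on the top-level SARIF shape: a runs array whose entries each carry a results array. A minimal illustrative input, assuming nothing beyond that shape:

// Sketch: minimal SARIF-shaped input for countResultsInSarif.
const sampleSarif = JSON.stringify({
  version: "2.1.0",
  runs: [
    { results: [{ ruleId: "js/example-rule" }] },  // 1 result
    { results: [] },                               // 0 results
  ],
});
// countResultsInSarif(sampleSarif) === 1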
// Validates that the given file path refers to a valid SARIF file.
// Returns true if the file is valid, otherwise marks the action as
// failed and returns false.
export function validateSarifFileSchema(sarifFilePath: string): boolean {
  const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
  const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));

  const result = new jsonschema.Validator().validate(sarif, schema);
  if (result.valid) {
    return true;
  } else {
    // Set the failure message to the stacks of all the errors.
    // This should be of a manageable size and may even give enough to fix the error.
    const errorMessages = result.errors.map(e => "- " + e.stack);
    core.setFailed("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + errorMessages.join("\n"));

    // Also output the more verbose error messages in groups as these may be very large.
    for (const error of result.errors) {
      core.startGroup("Error details: " + error.stack);
      core.info(JSON.stringify(error, null, 2));
      core.endGroup();
    }

    return false;
  }
}
// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
async function uploadFiles(sarifFiles: string[]): Promise<boolean> {
  core.startGroup("Uploading results");
  core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));

  const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
  if (process.env[sentinelEnvVar]) {
    core.error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
    return false;
  }
  core.exportVariable(sentinelEnvVar, sentinelEnvVar);

  // Validate that the files we were asked to upload are all valid SARIF files
  for (const file of sarifFiles) {
    if (!validateSarifFileSchema(file)) {
      return false;
    }
  }

  const commitOid = await util.getCommitOid();
  const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
  const ref = util.getRef();
  const analysisKey = await util.getAnalysisKey();
  const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
  const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];

  let sarifPayload = combineSarifFiles(sarifFiles);
  sarifPayload = fingerprints.addFingerprints(sarifPayload);

  const zipped_sarif = zlib.gzipSync(sarifPayload).toString('base64');
  let checkoutPath = core.getInput('checkout_path');
  let checkoutURI = fileUrl(checkoutPath);
  const workflowRunID = parseInt(workflowRunIDStr, 10);

  if (Number.isNaN(workflowRunID)) {
    core.setFailed('GITHUB_RUN_ID must define a non NaN workflow run ID');
    return false;
  }

  let matrix: string | undefined = core.getInput('matrix');
  if (matrix === "null" || matrix === "") {
    matrix = undefined;
  }

  const toolNames = util.getToolNames(sarifPayload);

  const payload = JSON.stringify({
    "commit_oid": commitOid,
    "ref": ref,
    "analysis_key": analysisKey,
    "analysis_name": analysisName,
    "sarif": zipped_sarif,
    "workflow_run_id": workflowRunID,
    "checkout_uri": checkoutURI,
    "environment": matrix,
    "started_at": startedAt,
    "tool_names": toolNames,
  });

  // Log some useful debug info about the upload
  core.debug("Raw upload size: " + sarifPayload.length + " bytes");
  core.debug("Base64 zipped upload size: " + zipped_sarif.length + " bytes");
  core.debug("Number of results in upload: " + countResultsInSarif(sarifPayload));

  // Make the upload
  const succeeded = await uploadPayload(payload);

  core.endGroup();

  return succeeded;
}
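The SARIF body is gzipped and base64-encoded before being embedded in the JSON payload, which keeps large analyses within request size limits while staying transport-safe. A standalone sketch of that round trip, using only Node's zlib:

import * as zlib from 'zlib';

// Sketch: compress a SARIF string for upload, plus the inverse transform
// a consumer would apply. Round-trips losslessly.
function packSarif(sarif: string): string {
  return zlib.gzipSync(sarif).toString('base64');
}

function unpackSarif(packed: string): string {
  return zlib.gunzipSync(Buffer.from(packed, 'base64')).toString('utf8');
}

// unpackSarif(packSarif('{"runs":[]}')) === '{"runs":[]}'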
@@ -4,24 +4,24 @@ import * as upload_lib from './upload-lib';
import * as util from './util';

async function run() {
  if (util.should_abort('upload-sarif', false) || !await util.reportActionStarting('upload-sarif')) {
    return;
  }

  try {
    if (await upload_lib.upload(core.getInput('sarif_file'))) {
      await util.reportActionSucceeded('upload-sarif');
    } else {
      await util.reportActionFailed('upload-sarif', 'upload');
    }
  } catch (error) {
    core.setFailed(error.message);
    await util.reportActionFailed('upload-sarif', error.message, error.stack);
    return;
  }
}

run().catch(e => {
  core.setFailed("codeql/upload-sarif action failed: " + e);
  console.log(e);
});

src/util.ts
@@ -19,81 +19,81 @@ import * as sharedEnv from './shared-environment';
 */
export function should_abort(actionName: string, requireInitActionHasRun: boolean): boolean {

  // Check that required aspects of the environment are present
  const ref = process.env['GITHUB_REF'];
  if (ref === undefined) {
    core.setFailed('GITHUB_REF must be set.');
    return true;
  }

  // If the init action is required, then check that it completed successfully.
  if (requireInitActionHasRun && process.env[sharedEnv.CODEQL_ACTION_INIT_COMPLETED] === undefined) {
    core.setFailed('The CodeQL ' + actionName + ' action cannot be used unless the CodeQL init action is run first. Aborting.');
    return true;
  }

  return false;
}

/**
 * Get an environment parameter, but throw an error if it is not set.
 */
export function getRequiredEnvParam(paramName: string): string {
  const value = process.env[paramName];
  if (value === undefined) {
    throw new Error(paramName + ' environment variable must be set');
  }
  core.debug(paramName + '=' + value);
  return value;
}
/**
 * Gets the set of languages in the current repository
 */
async function getLanguagesInRepo(): Promise<string[]> {
  // Translate between GitHub's API names for languages and ours
  const codeqlLanguages = {
    'C': 'cpp',
    'C++': 'cpp',
    'C#': 'csharp',
    'Go': 'go',
    'Java': 'java',
    'JavaScript': 'javascript',
    'TypeScript': 'javascript',
    'Python': 'python',
  };
  let repo_nwo = process.env['GITHUB_REPOSITORY']?.split("/");
  if (repo_nwo) {
    let owner = repo_nwo[0];
    let repo = repo_nwo[1];

    core.debug(`GitHub repo ${owner} ${repo}`);
    let ok = new octokit.Octokit({
      auth: core.getInput('token'),
      userAgent: "CodeQL Action",
      log: consoleLogLevel({ level: "debug" })
    });
    const response = await ok.request("GET /repos/:owner/:repo/languages", ({
      owner,
      repo
    }));

    core.debug("Languages API response: " + JSON.stringify(response));

    // The GitHub API returns languages in order of popularity.
    // When we pick a language to autobuild we want to pick the most popular traced language.
    // Since sets in JavaScript maintain insertion order, using a set here and then spreading it
    // into an array gives us an array of languages ordered by popularity.
    let languages: Set<string> = new Set();
    for (let lang in response.data) {
      if (lang in codeqlLanguages) {
        languages.add(codeqlLanguages[lang]);
      }
    }
    return [...languages];
  } else {
    return [];
  }
}
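The insertion-order trick above is worth seeing in isolation: a Set deduplicates (C and C++ both map to cpp) while preserving first-insertion order, so the API's popularity ordering survives the mapping. A small sketch with illustrative data:

// Sketch: dedupe while preserving the popularity ordering of the input.
// 'C' and 'C++' collapse into a single 'cpp' at its first position.
const apiOrder = ['TypeScript', 'C', 'Python', 'C++'];   // most popular first
const mapping: { [name: string]: string } = {
  'TypeScript': 'javascript', 'C': 'cpp', 'C++': 'cpp', 'Python': 'python',
};

const ordered = [...new Set(apiOrder.map(l => mapping[l]))];
// ordered === ['javascript', 'cpp', 'python']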
/**
@@ -110,71 +110,71 @@ async function getLanguagesInRepo(): Promise<string[]> {
 */
export async function getLanguages(): Promise<string[]> {

  // Obtain from CODEQL_ACTION_LANGUAGES if set
  const langsVar = process.env[sharedEnv.CODEQL_ACTION_LANGUAGES];
  if (langsVar) {
    return langsVar.split(',')
      .map(x => x.trim())
      .filter(x => x.length > 0);
  }
  // Obtain from action input 'languages' if set
  let languages = core.getInput('languages', { required: false })
    .split(',')
    .map(x => x.trim())
    .filter(x => x.length > 0);
  core.info("Languages from configuration: " + JSON.stringify(languages));

  if (languages.length === 0) {
    // Obtain languages as all languages in the repo that can be analysed
    languages = await getLanguagesInRepo();
    core.info("Automatically detected languages: " + JSON.stringify(languages));
  }

  core.exportVariable(sharedEnv.CODEQL_ACTION_LANGUAGES, languages.join(','));

  return languages;
}
/**
 * Gets the SHA of the commit that is currently checked out.
 */
export async function getCommitOid(): Promise<string> {
  let commitOid = '';
  await exec.exec('git', ['rev-parse', 'HEAD'], {
    silent: true,
    listeners: {
      stdout: (data) => { commitOid += data.toString(); },
      stderr: (data) => { process.stderr.write(data); }
    }
  });
  return commitOid.trim();
}
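@actions/exec streams output through listener callbacks rather than returning it, so capturing a command's stdout means accumulating chunks, as above. A reusable hedged sketch of the same pattern:

import * as exec from '@actions/exec';

// Sketch: run a command and return its trimmed stdout, accumulating
// the chunked listener callbacks that @actions/exec emits.
async function captureStdout(cmd: string, args: string[]): Promise<string> {
  let output = '';
  await exec.exec(cmd, args, {
    silent: true,                       // don't echo to the job log
    listeners: {
      stdout: (data: Buffer) => { output += data.toString(); },
    },
  });
  return output.trim();
}

// const sha = await captureStdout('git', ['rev-parse', 'HEAD']);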
/**
 * Get the path of the currently executing workflow.
 */
async function getWorkflowPath(): Promise<string> {
  const repo_nwo = getRequiredEnvParam('GITHUB_REPOSITORY').split("/");
  const owner = repo_nwo[0];
  const repo = repo_nwo[1];
  const run_id = getRequiredEnvParam('GITHUB_RUN_ID');

  const ok = new octokit.Octokit({
    auth: core.getInput('token'),
    userAgent: "CodeQL Action",
    log: consoleLogLevel({ level: 'debug' })
  });

  const runsResponse = await ok.request('GET /repos/:owner/:repo/actions/runs/:run_id', {
    owner,
    repo,
    run_id
  });
  const workflowUrl = runsResponse.data.workflow_url;

  const workflowResponse = await ok.request('GET ' + workflowUrl);

  return workflowResponse.data.path;
}
/**
@@ -185,54 +185,54 @@ async function getWorkflowPath(): Promise<string> {
 * the github API, but after that the result will be cached.
 */
export async function getAnalysisKey(): Promise<string> {
  let analysisKey = process.env[sharedEnv.CODEQL_ACTION_ANALYSIS_KEY];
  if (analysisKey !== undefined) {
    return analysisKey;
  }

  const workflowPath = await getWorkflowPath();
  const jobName = getRequiredEnvParam('GITHUB_JOB');

  analysisKey = workflowPath + ':' + jobName;
  core.exportVariable(sharedEnv.CODEQL_ACTION_ANALYSIS_KEY, analysisKey);
  return analysisKey;
}
/**
 * Get the ref currently being analyzed.
 */
export function getRef(): string {
  // Will be in the form "refs/heads/master" on a push event
  // or in the form "refs/pull/N/merge" on a pull_request event
  const ref = getRequiredEnvParam('GITHUB_REF');

  // For pull request refs we want to convert from the 'merge' ref
  // to the 'head' ref, as that is what we want to analyse.
  // There should have been some code earlier in the workflow to do
  // the checkout, but we have no way of verifying that here.
  const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
  if (pull_ref_regex.test(ref)) {
    return ref.replace(pull_ref_regex, 'refs/pull/$1/head');
  } else {
    return ref;
  }
}
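The capture group in the regex carries the PR number across the rewrite, so only the final path segment changes. A worked example:

// Sketch: the merge-ref to head-ref rewrite, applied to sample inputs.
const pullRefRegex = /refs\/pull\/(\d+)\/merge/;
const rewrite = (ref: string) => ref.replace(pullRefRegex, 'refs/pull/$1/head');

// rewrite('refs/pull/42/merge') === 'refs/pull/42/head'
// rewrite('refs/heads/master')  === 'refs/heads/master'  (unchanged)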
interface StatusReport {
  "workflow_run_id": number;
  "workflow_name": string;
  "job_name": string;
  "matrix_vars"?: string;
  "languages": string;
  "commit_oid": string;
  "ref": string;
  "action_name": string;
  "action_oid": string;
  "started_at": string;
  "completed_at"?: string;
  "status": string;
  "cause"?: string;
  "exception"?: string;
}
/**
|
||||
@@ -244,54 +244,54 @@ interface StatusReport {
|
||||
* @param exception Exception (only supply if status is 'failure')
|
||||
*/
|
||||
async function createStatusReport(
|
||||
actionName: string,
|
||||
status: string,
|
||||
cause?: string,
|
||||
exception?: string):
|
||||
Promise<StatusReport> {
|
||||
actionName: string,
|
||||
status: string,
|
||||
cause?: string,
|
||||
exception?: string):
|
||||
Promise<StatusReport> {
|
||||
|
||||
const commitOid = process.env['GITHUB_SHA'] || '';
|
||||
const ref = getRef();
|
||||
const workflowRunIDStr = process.env['GITHUB_RUN_ID'];
|
||||
let workflowRunID = -1;
|
||||
if (workflowRunIDStr) {
|
||||
workflowRunID = parseInt(workflowRunIDStr, 10);
|
||||
}
|
||||
const workflowName = process.env['GITHUB_WORKFLOW'] || '';
|
||||
const jobName = process.env['GITHUB_JOB'] || '';
|
||||
const languages = (await getLanguages()).sort().join(',');
|
||||
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT] || new Date().toISOString();
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_STARTED_AT, startedAt);
|
||||
const commitOid = process.env['GITHUB_SHA'] || '';
|
||||
const ref = getRef();
|
||||
const workflowRunIDStr = process.env['GITHUB_RUN_ID'];
|
||||
let workflowRunID = -1;
|
||||
if (workflowRunIDStr) {
|
||||
workflowRunID = parseInt(workflowRunIDStr, 10);
|
||||
}
|
||||
const workflowName = process.env['GITHUB_WORKFLOW'] || '';
|
||||
const jobName = process.env['GITHUB_JOB'] || '';
|
||||
const languages = (await getLanguages()).sort().join(',');
|
||||
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT] || new Date().toISOString();
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_STARTED_AT, startedAt);
|
||||
|
||||
let statusReport: StatusReport = {
|
||||
workflow_run_id: workflowRunID,
|
||||
workflow_name: workflowName,
|
||||
job_name: jobName,
|
||||
languages: languages,
|
||||
commit_oid: commitOid,
|
||||
ref: ref,
|
||||
action_name: actionName,
|
||||
action_oid: "unknown", // TODO decide if it's possible to fill this in
|
||||
started_at: startedAt,
|
||||
status: status
|
||||
};
|
||||
let statusReport: StatusReport = {
|
||||
workflow_run_id: workflowRunID,
|
||||
workflow_name: workflowName,
|
||||
job_name: jobName,
|
||||
languages: languages,
|
||||
commit_oid: commitOid,
|
||||
ref: ref,
|
||||
action_name: actionName,
|
||||
action_oid: "unknown", // TODO decide if it's possible to fill this in
|
||||
started_at: startedAt,
|
||||
status: status
|
||||
};
|
||||
|
||||
// Add optional parameters
|
||||
if (cause) {
|
||||
statusReport.cause = cause;
|
||||
}
|
||||
if (exception) {
|
||||
statusReport.exception = exception;
|
||||
}
|
||||
if (status === 'success' || status === 'failure') {
|
||||
statusReport.completed_at = new Date().toISOString();
|
||||
}
|
||||
let matrix: string | undefined = core.getInput('matrix');
|
||||
if (matrix) {
|
||||
statusReport.matrix_vars = matrix;
|
||||
}
|
||||
// Add optional parameters
|
||||
if (cause) {
|
||||
statusReport.cause = cause;
|
||||
}
|
||||
if (exception) {
|
||||
statusReport.exception = exception;
|
||||
}
|
||||
if (status === 'success' || status === 'failure') {
|
||||
statusReport.completed_at = new Date().toISOString();
|
||||
}
|
||||
let matrix: string | undefined = core.getInput('matrix');
|
||||
if (matrix) {
|
||||
statusReport.matrix_vars = matrix;
|
||||
}
|
||||
|
||||
return statusReport;
|
||||
return statusReport;
|
||||
}

/**
@@ -302,18 +302,18 @@ async function createStatusReport(
 * received.
 */
async function sendStatusReport(statusReport: StatusReport): Promise<number | undefined> {
    const statusReportJSON = JSON.stringify(statusReport);

    core.debug('Sending status report: ' + statusReportJSON);

    const githubToken = core.getInput('token');
    const ph: auth.BearerCredentialHandler = new auth.BearerCredentialHandler(githubToken);
    const client = new http.HttpClient('Code Scanning : Status Report', [ph]);
    const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY']
        + '/code-scanning/analysis/status';
    const res: http.HttpClientResponse = await client.put(url, statusReportJSON);

    return res.message?.statusCode;
}

/**
@@ -325,24 +325,24 @@ async function sendStatusReport(statusReport: StatusReport): Promise<number | un
 * Returns true unless a problem occurred and the action should abort.
 */
export async function reportActionStarting(action: string): Promise<boolean> {
    const statusCode = await sendStatusReport(await createStatusReport(action, 'starting'));

    // If the status report request fails with a 403 or a 404, then this is a deliberate
    // message from the endpoint that the SARIF upload can be expected to fail too,
    // so the action should fail to avoid wasting actions minutes.
    //
    // Other failure responses (or lack thereof) could be transitory and should not
    // cause the action to fail.
    if (statusCode === 403) {
        core.setFailed('The repo on which this action is running is not opted-in to CodeQL code scanning.');
        return false;
    }
    if (statusCode === 404) {
        core.setFailed('Not authorized to use the CodeQL code scanning feature on this repo.');
        return false;
    }

    return true;
}

/**
@@ -352,7 +352,7 @@ export async function reportActionStarting(action: string): Promise<boolean> {
 * this is likely to give a more useful duration when inspecting events.
 */
export async function reportActionFailed(action: string, cause?: string, exception?: string) {
    await sendStatusReport(await createStatusReport(action, 'failure', cause, exception));
}

/**
@@ -362,7 +362,7 @@ export async function reportActionFailed(action: string, cause?: string, excepti
 * this is likely to give a more useful duration when inspecting events.
 */
export async function reportActionSucceeded(action: string) {
    await sendStatusReport(await createStatusReport(action, 'success'));
}
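
// Typical usage (illustrative sketch; runAnalysis is a hypothetical
// placeholder for the action's real work): an entry point brackets its work
// with these reports and aborts early if the starting report is rejected:
//
//   async function run() {
//       if (!await reportActionStarting('finish')) {
//           return;
//       }
//       try {
//           await runAnalysis();
//           await reportActionSucceeded('finish');
//       } catch (e) {
//           core.setFailed(e.message);
//           await reportActionFailed('finish', e.message, e.stack);
//       }
//   }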

/**
@@ -381,27 +381,31 @@ export async function reportActionAborted(action: string, cause?: string) {
 * Returns an array of unique string tool names.
 */
export function getToolNames(sarifContents: string): string[] {
    const sarif = JSON.parse(sarifContents);
    const toolNames = {};

    for (const run of sarif.runs || []) {
        const tool = run.tool || {};
        const driver = tool.driver || {};
        if (typeof driver.name === "string" && driver.name.length > 0) {
            toolNames[driver.name] = true;
        }
    }

    return Object.keys(toolNames);
}
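
// Example (illustrative SARIF fragment): duplicate driver names collapse
// because they are collected as object keys:
//
//   const sarif = JSON.stringify({
//       runs: [
//           { tool: { driver: { name: 'CodeQL' } } },
//           { tool: { driver: { name: 'CodeQL' } } },
//           { tool: { driver: { name: 'ESLint' } } }
//       ]
//   });
//   getToolNames(sarif);   // => ['CodeQL', 'ESLint']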

// Creates a random temporary directory, runs the given body, and then deletes the directory.
// Mostly intended for use within tests.
export async function withTmpDir<T>(body: (tmpDir: string) => Promise<T>): Promise<T> {
    const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
    const result = await body(tmpDir);
    fs.rmdirSync(tmpDir, { recursive: true });
    return result;
    const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
    const realSubdir = path.join(tmpDir, 'real');
    fs.mkdirSync(realSubdir);
    const symlinkSubdir = path.join(tmpDir, 'symlink');
    fs.symlinkSync(realSubdir, symlinkSubdir, 'dir');
    const result = await body(symlinkSubdir);
    fs.rmdirSync(tmpDir, { recursive: true });
    return result;
}
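
// Usage sketch (illustrative, per the new version above, where the body
// receives the symlinked path): a test can work in a throwaway directory
// and rely on the wrapper for cleanup:
//
//   const contents = await withTmpDir(async tmpDir => {
//       fs.writeFileSync(path.join(tmpDir, 'x.txt'), 'hello');
//       return fs.readFileSync(path.join(tmpDir, 'x.txt'), 'utf8');
//   });
//   // contents === 'hello'; the directory has been deleted by this point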

/**
@@ -411,20 +415,20 @@ export async function withTmpDir<T>(body: (tmpDir: string) => Promise<T>): Promi
 * @returns string
 */
export function getMemoryFlag(): string {
    let memoryToUseMegaBytes: number;
    const memoryToUseString = core.getInput("ram");
    if (memoryToUseString) {
        memoryToUseMegaBytes = Number(memoryToUseString);
        if (Number.isNaN(memoryToUseMegaBytes) || memoryToUseMegaBytes <= 0) {
            throw new Error("Invalid RAM setting \"" + memoryToUseString + "\" specified.");
        }
    } else {
        const totalMemoryBytes = os.totalmem();
        const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
        const systemReservedMemoryMegaBytes = 256;
        memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
    }
    return "--ram=" + Math.floor(memoryToUseMegaBytes);
}
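
// Example (illustrative arithmetic): with no "ram" input on a machine with
// 8192 MB of memory, 256 MB is reserved for the system:
//
//   8192 MB total - 256 MB reserved = 7936 MB
//   getMemoryFlag();   // => '--ram=7936'
//
// With the input set, e.g. ram: '4096', the value is used directly:
//
//   getMemoryFlag();   // => '--ram=4096'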

/**
@@ -434,21 +438,21 @@ export function getMemoryFlag(): string {
 * @returns string
 */
export function getThreadsFlag(): string {
    let numThreads = 1;
    const numThreadsString = core.getInput("threads");
    if (numThreadsString) {
        numThreads = Number(numThreadsString);
        if (Number.isNaN(numThreads)) {
            throw new Error(`Invalid threads setting "${numThreadsString}" specified.`);
        }
        const maxThreads = os.cpus().length;
        if (numThreads > maxThreads) {
            numThreads = maxThreads;
        }
        const minThreads = -maxThreads;
        if (numThreads < minThreads) {
            numThreads = minThreads;
        }
    }
    return `--threads=${numThreads}`;
}
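
// Example (illustrative values): the requested thread count is clamped to
// the number of available cores in both directions, so on an 8-core runner:
//
//   threads: '16'    =>  getThreadsFlag() === '--threads=8'
//   threads: '-100'  =>  getThreadsFlag() === '--threads=-8'
//   threads: '4'     =>  getThreadsFlag() === '--threads=4'
//   (no input)       =>  getThreadsFlag() === '--threads=1'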