Running lint-fix

Chris Raynor
2020-09-14 10:44:43 +01:00
parent c96f84308a
commit a184d50a26
89 changed files with 3646 additions and 2809 deletions
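
The hunks below are a pure formatting pass over the compiled lib/ output; the commit itself does not show which tools were run. The changes are consistent with Prettier's defaults (double quotes, trailing commas in multi-line literals, parenthesized single-argument arrows) plus ESLint autofixes such as prefer-const and prefer-template, and since the .js.map mappings change as well, the TypeScript sources under src/ appear to have been reformatted and the lib/ output rebuilt. The config below is a hypothetical sketch of that kind of setup, not the repository's actual configuration:

// prettier.config.js -- hypothetical sketch only; illustrates the style the diffs converge on
module.exports = {
  singleQuote: false,    // 'x' becomes "x" throughout the hunks below
  trailingComma: "es5",  // trailing commas on multi-line object and array literals
  arrowParens: "always", // p => ... becomes (p) => ...
};
// The const-for-let and template-literal changes (e.g. `analyze action failed: ${e}`)
// come from ESLint rules such as prefer-const and prefer-template, not from Prettier.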

lib/analysis-paths.js (generated, 23 lines changed)

@@ -1,25 +1,24 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function isInterpretedLanguage(language) {
return language === 'javascript' || language === 'python';
return language === "javascript" || language === "python";
}
// Matches a string containing only characters that are legal to include in paths on windows.
exports.legalWindowsPathCharactersRegex = /^[^<>:"\|?]*$/;
// Builds an environment variable suitable for LGTM_INDEX_INCLUDE or LGTM_INDEX_EXCLUDE
function buildIncludeExcludeEnvVar(paths) {
// Ignore anything containing a *
paths = paths.filter(p => p.indexOf('*') === -1);
paths = paths.filter((p) => p.indexOf("*") === -1);
// Some characters are illegal in path names in windows
if (process.platform === 'win32') {
paths = paths.filter(p => p.match(exports.legalWindowsPathCharactersRegex));
if (process.platform === "win32") {
paths = paths.filter((p) => p.match(exports.legalWindowsPathCharactersRegex));
}
return paths.join('\n');
return paths.join("\n");
}
function printPathFiltersWarning(config, logger) {
// Index include/exclude/filters only work in javascript and python.
// If any other languages are detected/configured then show a warning.
if ((config.paths.length !== 0 ||
config.pathsIgnore.length !== 0) &&
if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) &&
!config.languages.every(isInterpretedLanguage)) {
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
}
@@ -34,19 +33,19 @@ function includeAndExcludeAnalysisPaths(config) {
// traverse the entire file tree to determine which files are matched.
// Any paths containing "*" are not included in these.
if (config.paths.length !== 0) {
process.env['LGTM_INDEX_INCLUDE'] = buildIncludeExcludeEnvVar(config.paths);
process.env["LGTM_INDEX_INCLUDE"] = buildIncludeExcludeEnvVar(config.paths);
}
if (config.pathsIgnore.length !== 0) {
process.env['LGTM_INDEX_EXCLUDE'] = buildIncludeExcludeEnvVar(config.pathsIgnore);
process.env["LGTM_INDEX_EXCLUDE"] = buildIncludeExcludeEnvVar(config.pathsIgnore);
}
// The 'LGTM_INDEX_FILTERS' environment variable controls which files are
// extracted or ignored. It does not control which directories are traversed.
// This does understand the glob and double-glob syntax.
const filters = [];
filters.push(...config.paths.map(p => 'include:' + p));
filters.push(...config.pathsIgnore.map(p => 'exclude:' + p));
filters.push(...config.paths.map((p) => `include:${p}`));
filters.push(...config.pathsIgnore.map((p) => `exclude:${p}`));
if (filters.length !== 0) {
process.env['LGTM_INDEX_FILTERS'] = filters.join('\n');
process.env["LGTM_INDEX_FILTERS"] = filters.join("\n");
}
}
exports.includeAndExcludeAnalysisPaths = includeAndExcludeAnalysisPaths;

lib/analysis-paths.js.map

@@ -1 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;AAGA,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEjD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACrE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CAAC,MAA0B,EAAE,MAAc;IAChF,oEAAoE;IACpE,sEAAsE;IACtE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;QAC5B,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAChC,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAEhD,MAAM,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC9G;AACH,CAAC;AATD,0DASC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;KACnF;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IACvD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IAC7D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AAxBD,wEAwBC"}
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;AAGA,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,oEAAoE;IACpE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,4FAA4F,CAC7F,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAC3D,MAAM,CAAC,WAAW,CACnB,CAAC;KACH;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AA1BD,wEA0BC"}

lib/analysis-paths.test.js (generated)

@@ -25,12 +25,12 @@ ava_1.default("emptyPaths", async (t) => {
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: '',
codeQLCmd: "",
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
t.is(process.env['LGTM_INDEX_FILTERS'], undefined);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
t.is(process.env["LGTM_INDEX_EXCLUDE"], undefined);
t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
});
});
ava_1.default("nonEmptyPaths", async (t) => {
@@ -38,17 +38,17 @@ ava_1.default("nonEmptyPaths", async (t) => {
const config = {
languages: [],
queries: {},
paths: ['path1', 'path2', '**/path3'],
pathsIgnore: ['path4', 'path5', 'path6/**'],
paths: ["path1", "path2", "**/path3"],
pathsIgnore: ["path4", "path5", "path6/**"],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: '',
codeQLCmd: "",
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path4\npath5');
t.is(process.env['LGTM_INDEX_FILTERS'], 'include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**');
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
t.is(process.env["LGTM_INDEX_EXCLUDE"], "path4\npath5");
t.is(process.env["LGTM_INDEX_FILTERS"], "include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**");
});
});
//# sourceMappingURL=analysis-paths.test.js.map

lib/analysis-paths.test.js.map

@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC3B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAC1C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAC1C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,gGAAgG,CAAC,CAAC;IAC5I,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

lib/analyze-action.js (generated, 16 lines changed)

@@ -15,8 +15,10 @@ const repository_1 = require("./repository");
const util = __importStar(require("./util"));
async function sendStatusReport(startedAt, stats, error) {
var _a, _b, _c;
const status = ((_a = stats) === null || _a === void 0 ? void 0 : _a.analyze_failure_language) !== undefined || error !== undefined ? 'failure' : 'success';
const statusReportBase = await util.createStatusReportBase('finish', status, startedAt, (_b = error) === null || _b === void 0 ? void 0 : _b.message, (_c = error) === null || _c === void 0 ? void 0 : _c.stack);
const status = ((_a = stats) === null || _a === void 0 ? void 0 : _a.analyze_failure_language) !== undefined || error !== undefined
? "failure"
: "success";
const statusReportBase = await util.createStatusReportBase("finish", status, startedAt, (_b = error) === null || _b === void 0 ? void 0 : _b.message, (_c = error) === null || _c === void 0 ? void 0 : _c.stack);
const statusReport = {
...statusReportBase,
...(stats || {}),
@@ -28,15 +30,15 @@ async function run() {
let stats = undefined;
try {
util.prepareLocalRunEnvironment();
if (!await util.sendStatusReport(await util.createStatusReportBase('finish', 'starting', startedAt), true)) {
if (!(await util.sendStatusReport(await util.createStatusReportBase("finish", "starting", startedAt), true))) {
return;
}
const logger = logging_1.getActionsLogger();
const config = await config_utils_1.getConfig(util.getRequiredEnvParam('RUNNER_TEMP'), logger);
const config = await config_utils_1.getConfig(util.getRequiredEnvParam("RUNNER_TEMP"), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
stats = await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(util.getRequiredEnvParam('GITHUB_REPOSITORY')), await util.getCommitOid(), util.getRef(), await util.getAnalysisKey(), util.getRequiredEnvParam('GITHUB_WORKFLOW'), util.getWorkflowRunID(), core.getInput('checkout_path'), core.getInput('matrix'), core.getInput('token'), util.getRequiredEnvParam('GITHUB_SERVER_URL'), core.getInput('upload') === 'true', 'actions', core.getInput('output'), util.getMemoryFlag(core.getInput('ram')), util.getAddSnippetsFlag(core.getInput('add-snippets')), util.getThreadsFlag(core.getInput('threads'), logger), config, logger);
stats = await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await util.getCommitOid(), util.getRef(), await util.getAnalysisKey(), util.getRequiredEnvParam("GITHUB_WORKFLOW"), util.getWorkflowRunID(), core.getInput("checkout_path"), core.getInput("matrix"), core.getInput("token"), util.getRequiredEnvParam("GITHUB_SERVER_URL"), core.getInput("upload") === "true", "actions", core.getInput("output"), util.getMemoryFlag(core.getInput("ram")), util.getAddSnippetsFlag(core.getInput("add-snippets")), util.getThreadsFlag(core.getInput("threads"), logger), config, logger);
}
catch (error) {
core.setFailed(error.message);
@@ -46,8 +48,8 @@ async function run() {
}
await sendStatusReport(startedAt, stats);
}
run().catch(e => {
core.setFailed("analyze action failed: " + e);
run().catch((e) => {
core.setFailed(`analyze action failed: ${e}`);
console.log(e);
});
//# sourceMappingURL=analyze-action.js.map

lib/analyze-action.js.map

@@ -1 +1 @@
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,uCAA6D;AAC7D,iDAA2C;AAC3C,uCAA6C;AAC7C,6CAAkD;AAClD,6CAA+B;AAI/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,KAAuC,EACvC,KAAa;;IAEb,MAAM,MAAM,GAAG,OAAA,KAAK,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;IAC5G,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,QAAQ,EAAE,MAAM,EAAE,SAAS,QAAE,KAAK,0CAAE,OAAO,QAAE,KAAK,0CAAE,KAAK,CAAC,CAAC;IACtH,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;KACjB,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,KAAK,GAAqC,SAAS,CAAC;IACxD,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,QAAQ,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YAC1G,OAAO;SACR;QACD,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;QAClC,MAAM,MAAM,GAAG,MAAM,wBAAS,CAAC,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAAE,MAAM,CAAC,CAAC;QAChF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CAAC,yFAAyF,CAAC,CAAC;SAC5G;QACD,KAAK,GAAG,MAAM,oBAAU,CACtB,+BAAkB,CAAC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACjE,MAAM,IAAI,CAAC,YAAY,EAAE,EACzB,IAAI,CAAC,MAAM,EAAE,EACb,MAAM,IAAI,CAAC,cAAc,EAAE,EAC3B,IAAI,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAC3C,IAAI,CAAC,gBAAgB,EAAE,EACvB,IAAI,CAAC,QAAQ,CAAC,eAAe,CAAC,EAC9B,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EACvB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,KAAK,MAAM,EAClC,SAAS,EACT,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EACvB,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,EACxC,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,EACtD,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC,EACrD,MAAM,EACN,MAAM,CAAC,CAAC;KAEX;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAChD,OAAO;KACR;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;AAC3C,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,yBAAyB,GAAG,CAAC,CAAC,CAAC;IAC9C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,uCAA6D;AAC7D,iDAA2C;AAC3C,uCAA6C;AAC7C,6CAAkD;AAClD,6CAA+B;AAM/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,KAAuC,EACvC,KAAa;;IAEb,MAAM,MAAM,GACV,OAAA,KAAK,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClE,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,QAAQ,EACR,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;KACjB,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,KAAK,GAAqC,SAAS,CAAC;IACxD,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IACE,CAAC,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAC3B,MAAM,IAAI,CAAC,sBAAsB,CAAC,QAAQ,EAAE,UAAU,EAAE,SAAS,CAAC,EAClE,IAAI,CACL,CAAC,EACF;YACA,OAAO;SACR;QACD,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;QAClC,MAAM,MAAM,GAAG,MAAM,wBAAS,CAC5B,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,EACvC,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,KAAK,GAAG,MAAM,oBAAU,CACtB,+BAAkB,CAAC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACjE,MAAM,IAAI,CAAC,YAAY,EAAE,EACzB,IAAI,CAAC,MAAM,EAAE,EACb,MAAM,IAAI,CAAC,cAAc,EAAE,EAC3B,IAAI,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAC3C,IAAI,CAAC,gBAAgB,EAAE,EACvB,IAAI,CAAC,QAAQ,CAAC,eAAe,CAAC,EAC9B,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EACvB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,KAAK,MAAM,EAClC,SAAS,EACT,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EACvB,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,EACxC,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,EACtD,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC,EACrD,MAAM,EACN,MAAM,CACP,CAAC;KACH;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAChD,OAAO;KACR;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;AAC3C,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;IAChB,IAAI,CAAC,SAAS,CAAC,0BAA0B,CAAC,EAAE,CAAC,CAAC;IAC9C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

lib/analyze.js (generated, 26 lines changed)

@@ -22,7 +22,7 @@ async function createdDBForScannedLanguages(config, logger) {
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
if (languages_1.isScannedLanguage(language)) {
logger.startGroup('Extracting ' + language);
logger.startGroup(`Extracting ${language}`);
await codeql.extractScannedLanguage(util.getCodeQLDatabasePath(config.tempDir, language), language);
logger.endGroup();
}
@@ -32,7 +32,7 @@ async function finalizeDatabaseCreation(config, logger) {
await createdDBForScannedLanguages(config, logger);
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
logger.startGroup('Finalizing ' + language);
logger.startGroup(`Finalizing ${language}`);
await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config.tempDir, language));
logger.endGroup();
}
@@ -40,23 +40,23 @@ async function finalizeDatabaseCreation(config, logger) {
// Runs queries and creates sarif files in the given folder
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
for (let language of config.languages) {
logger.startGroup('Analyzing ' + language);
for (const language of config.languages) {
logger.startGroup(`Analyzing ${language}`);
const queries = config.queries[language] || [];
if (queries.length === 0) {
throw new Error('Unable to analyse ' + language + ' as no queries were selected for this language');
throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
}
try {
const databasePath = util.getCodeQLDatabasePath(config.tempDir, language);
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuite = databasePath + '-queries.qls';
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
const querySuite = `${databasePath}-queries.qls`;
const querySuiteContents = queries.map((q) => `- query: ${q}`).join("\n");
fs.writeFileSync(querySuite, querySuiteContents);
logger.debug('Query suite file for ' + language + '...\n' + querySuiteContents);
const sarifFile = path.join(sarifFolder, language + '.sarif');
logger.debug(`Query suite file for ${language}...\n${querySuiteContents}`);
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
await codeql.databaseAnalyze(databasePath, sarifFile, querySuite, memoryFlag, addSnippetsFlag, threadsFlag);
logger.debug('SARIF results for database ' + language + ' created at "' + sarifFile + '"');
logger.debug(`SARIF results for database ${language} created at "${sarifFile}"`);
logger.endGroup();
}
catch (e) {
@@ -72,12 +72,12 @@ async function runAnalyze(repositoryNwo, commitOid, ref, analysisKey, analysisNa
// Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
fs.mkdirSync(outputDir, { recursive: true });
logger.info('Finalizing database creation');
logger.info("Finalizing database creation");
await finalizeDatabaseCreation(config, logger);
logger.info('Analyzing database');
logger.info("Analyzing database");
const queriesStats = await runQueries(outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger);
if (!doUpload) {
logger.info('Not uploading results');
logger.info("Not uploading results");
return { ...queriesStats };
}
const uploadStats = await upload_lib.upload(outputDir, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger);
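
The runQueries hunk above only changes formatting, but the comment in it is worth unpacking: the selected queries are written to a <databasePath>-queries.qls file and that file is handed to codeql, because listing every query on the command line can exceed command-line length limits, particularly on Windows. A hypothetical illustration of the generated suite file, reusing the exact map/join expression from the hunk (the query paths are made up):

// Hypothetical query list; the real values come from config.queries[language].
const queries = [
  "/toolcache/codeql/qlpacks/javascript-security-extended.qls",
  "/home/runner/work/repo/custom-queries/UnsafeEval.ql",
];
// Same expression as in the hunk above:
const querySuiteContents = queries.map((q) => `- query: ${q}`).join("\n");
// querySuiteContents is written to <databasePath>-queries.qls and looks like:
// - query: /toolcache/codeql/qlpacks/javascript-security-extended.qls
// - query: /home/runner/work/repo/custom-queries/UnsafeEval.ql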

lib/analyze.js.map

@@ -1 +1 @@
{"version":3,"file":"analyze.js","sourceRoot":"","sources":["../src/analyze.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,gEAAkD;AAClD,qCAAqC;AAErC,2CAAgD;AAGhD,gEAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAiC/B,KAAK,UAAU,4BAA4B,CACzC,MAA0B,EAC1B,MAAc;IAEd,sEAAsE;IACtE,oCAAoC;IACpC,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IAErD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,6BAAiB,CAAC,QAAQ,CAAC,EAAE;YAC/B,MAAM,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;YAC5C,MAAM,MAAM,CAAC,sBAAsB,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EAAE,QAAQ,CAAC,CAAC;YACpG,MAAM,CAAC,QAAQ,EAAE,CAAC;SACnB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CACrC,MAA0B,EAC1B,MAAc;IAEd,MAAM,4BAA4B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAEnD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,gBAAgB,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC,CAAC;QACpF,MAAM,CAAC,QAAQ,EAAE,CAAC;KACnB;AACH,CAAC;AAED,2DAA2D;AAC3D,KAAK,UAAU,UAAU,CACvB,WAAmB,EACnB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,IAAI,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACrC,MAAM,CAAC,UAAU,CAAC,YAAY,GAAG,QAAQ,CAAC,CAAC;QAE3C,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;QAC/C,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;YACxB,MAAM,IAAI,KAAK,CAAC,oBAAoB,GAAG,QAAQ,GAAG,gDAAgD,CAAC,CAAC;SACrG;QAED,IAAI;YACF,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;YAC1E,uEAAuE;YACvE,2EAA2E;YAC3E,MAAM,UAAU,GAAG,YAAY,GAAG,cAAc,CAAC;YACjD,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACxE,EAAE,CAAC,aAAa,CAAC,UAAU,EAAE,kBAAkB,CAAC,CAAC;YACjD,MAAM,CAAC,KAAK,CAAC,uBAAuB,GAAG,QAAQ,GAAG,OAAO,GAAG,kBAAkB,CAAC,CAAC;YAEhF,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,QAAQ,GAAG,QAAQ,CAAC,CAAC;YAE9D,MAAM,MAAM,CAAC,eAAe,CAAC,YAAY,EAAE,SAAS,EAAE,UAAU,EAAE,UAAU,EAAE,eAAe,EAAE,WAAW,CAAC,CAAC;YAE5G,MAAM,CAAC,KAAK,CAAC,6BAA6B,GAAG,QAAQ,GAAG,eAAe,GAAG,SAAS,GAAG,GAAG,CAAC,CAAC;YAC3F,MAAM,CAAC,QAAQ,EAAE,CAAC;SAEnB;QAAC,OAAO,CAAC,EAAE;YACV,+DAA+D;YAC/D,OAAO;gBACL,wBAAwB,EAAE,QAAQ;aACnC,CAAC;SACH;KACF;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAEM,KAAK,UAAU,UAAU,CAC9B,aAA4B,EAC5B,SAAiB,EACjB,GAAW,EACX,WAA+B,EAC/B,YAAgC,EAChC,aAAiC,EACjC,YAAoB,EACpB,WAA+B,EAC/B,UAAkB,EAClB,SAAiB,EACjB,QAAiB,EACjB,IAAe,EACf,SAAiB,EACjB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,8DAA8D;IAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;IAEzD,EAAE,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE7C,MAAM,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;IAC5C,MAAM,wBAAwB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAE/C,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IAClC,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,SAAS,EAAE,UAAU,EAAE,eAAe,EAAE,WAAW,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;IAE3G,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;QACrC,OAAO,EAAE,GAAG,YAAY,EAAE,CAAC;KAC5B;IAED,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CACzC,SAAS,EACT,aAAa,EACb,SAAS,EACT,GAAG,EACH,WAAW,EACX,YAAY,EACZ,aAAa,EACb,YAAY,EACZ,WAAW,EACX,UAAU,EACV,SAAS,EACT,IAAI,EACJ,MAAM,CAAC,CAAC;IAEV,OAAO,EAAE,GAAG,YAAY,EAAE,GAAG,WAAW,EAAE,CAAC;AAC7C,CAAC;AApDD,gCAoDC"}
{"version":3,"file":"analyze.js","sourceRoot":"","sources":["../src/analyze.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,gEAAkD;AAClD,qCAAqC;AAErC,2CAAgD;AAGhD,gEAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAmC/B,KAAK,UAAU,4BAA4B,CACzC,MAA0B,EAC1B,MAAc;IAEd,sEAAsE;IACtE,oCAAoC;IACpC,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IAErD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,6BAAiB,CAAC,QAAQ,CAAC,EAAE;YAC/B,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;YAC5C,MAAM,MAAM,CAAC,sBAAsB,CACjC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,CACT,CAAC;YACF,MAAM,CAAC,QAAQ,EAAE,CAAC;SACnB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CACrC,MAA0B,EAC1B,MAAc;IAEd,MAAM,4BAA4B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAEnD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,gBAAgB,CAC3B,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,CACrD,CAAC;QACF,MAAM,CAAC,QAAQ,EAAE,CAAC;KACnB;AACH,CAAC;AAED,2DAA2D;AAC3D,KAAK,UAAU,UAAU,CACvB,WAAmB,EACnB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,aAAa,QAAQ,EAAE,CAAC,CAAC;QAE3C,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;QAC/C,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,qBAAqB,QAAQ,gDAAgD,CAC9E,CAAC;SACH;QAED,IAAI;YACF,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;YAC1E,uEAAuE;YACvE,2EAA2E;YAC3E,MAAM,UAAU,GAAG,GAAG,YAAY,cAAc,CAAC;YACjD,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,YAAY,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YAC1E,EAAE,CAAC,aAAa,CAAC,UAAU,EAAE,kBAAkB,CAAC,CAAC;YACjD,MAAM,CAAC,KAAK,CACV,wBAAwB,QAAQ,QAAQ,kBAAkB,EAAE,CAC7D,CAAC;YAEF,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,GAAG,QAAQ,QAAQ,CAAC,CAAC;YAE9D,MAAM,MAAM,CAAC,eAAe,CAC1B,YAAY,EACZ,SAAS,EACT,UAAU,EACV,UAAU,EACV,eAAe,EACf,WAAW,CACZ,CAAC;YAEF,MAAM,CAAC,KAAK,CACV,8BAA8B,QAAQ,gBAAgB,SAAS,GAAG,CACnE,CAAC;YACF,MAAM,CAAC,QAAQ,EAAE,CAAC;SACnB;QAAC,OAAO,CAAC,EAAE;YACV,+DAA+D;YAC/D,OAAO;gBACL,wBAAwB,EAAE,QAAQ;aACnC,CAAC;SACH;KACF;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAEM,KAAK,UAAU,UAAU,CAC9B,aAA4B,EAC5B,SAAiB,EACjB,GAAW,EACX,WAA+B,EAC/B,YAAgC,EAChC,aAAiC,EACjC,YAAoB,EACpB,WAA+B,EAC/B,UAAkB,EAClB,SAAiB,EACjB,QAAiB,EACjB,IAAe,EACf,SAAiB,EACjB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,8DAA8D;IAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;IAEzD,EAAE,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE7C,MAAM,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;IAC5C,MAAM,wBAAwB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAE/C,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IAClC,MAAM,YAAY,GAAG,MAAM,UAAU,CACnC,SAAS,EACT,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,MAAM,CACP,CAAC;IAEF,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;QACrC,OAAO,EAAE,GAAG,YAAY,EAAE,CAAC;KAC5B;IAED,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CACzC,SAAS,EACT,aAAa,EACb,SAAS,EACT,GAAG,EACH,WAAW,EACX,YAAY,EACZ,aAAa,EACb,YAAY,EACZ,WAAW,EACX,UAAU,EACV,SAAS,EACT,IAAI,EACJ,MAAM,CACP,CAAC;IAEF,OAAO,EAAE,GAAG,YAAY,EAAE,GAAG,WAAW,EAAE,CAAC;AAC7C,CAAC;AA5DD,gCA4DC"}

lib/api-client.js (generated, 12 lines changed)

@@ -17,31 +17,31 @@ const path = __importStar(require("path"));
const util_1 = require("./util");
exports.getApiClient = function (githubAuth, githubUrl, allowLocalRun = false) {
if (util_1.isLocalRun() && !allowLocalRun) {
throw new Error('Invalid API call in local run');
throw new Error("Invalid API call in local run");
}
return new github.GitHub({
auth: githubAuth,
baseUrl: getApiUrl(githubUrl),
userAgent: "CodeQL Action",
log: console_log_level_1.default({ level: "debug" })
log: console_log_level_1.default({ level: "debug" }),
});
};
function getApiUrl(githubUrl) {
const url = new URL(githubUrl);
// If we detect this is trying to be to github.com
// then return with a fixed canonical URL.
if (url.hostname === 'github.com' || url.hostname === 'api.github.com') {
return 'https://api.github.com';
if (url.hostname === "github.com" || url.hostname === "api.github.com") {
return "https://api.github.com";
}
// Add the /api/v3 API prefix
url.pathname = path.join(url.pathname, 'api', 'v3');
url.pathname = path.join(url.pathname, "api", "v3");
return url.toString();
}
// Temporary function to aid in the transition to running on and off of github actions.
// Once all code has been coverted this function should be removed or made canonical
// and called only from the action entrypoints.
function getActionsApiClient(allowLocalRun = false) {
return exports.getApiClient(core.getInput('token'), util_1.getRequiredEnvParam('GITHUB_SERVER_URL'), allowLocalRun);
return exports.getApiClient(core.getInput("token"), util_1.getRequiredEnvParam("GITHUB_SERVER_URL"), allowLocalRun);
}
exports.getActionsApiClient = getActionsApiClient;
//# sourceMappingURL=api-client.js.map
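
The api-client hunk is again quote and formatting churn only. For orientation, a hedged sketch of how the exported getApiClient is used and what the private getApiUrl helper above produces (the server URL and token are made-up values; in the action they come from the "token" input and GITHUB_SERVER_URL, as getActionsApiClient shows):

const api = require("./api-client");

// Hypothetical inputs for a GitHub Enterprise Server instance.
const client = api.getApiClient("ghs_exampletoken", "https://ghe.example.com");

// Internally getApiUrl normalizes the server URL to its REST API base:
//   "https://github.com"      -> "https://api.github.com"   (dotcom is special-cased)
//   "https://api.github.com"  -> "https://api.github.com"
//   "https://ghe.example.com" -> "https://ghe.example.com/api/v3"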

lib/api-client.js.map

@@ -1 +1 @@
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,oDAAsC;AACtC,wDAA0C;AAC1C,0EAAgD;AAChD,2CAA6B;AAE7B,iCAAyD;AAE5C,QAAA,YAAY,GAAG,UAAS,UAAkB,EAAE,SAAiB,EAAE,aAAa,GAAG,KAAK;IAC/F,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IACD,OAAO,IAAI,MAAM,CAAC,MAAM,CACtB;QACE,IAAI,EAAE,UAAU;QAChB,OAAO,EAAE,SAAS,CAAC,SAAS,CAAC;QAC7B,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CAAC;AACP,CAAC,CAAC;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,kDAAkD;IAClD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,uFAAuF;AACvF,oFAAoF;AACpF,+CAA+C;AAC/C,SAAgB,mBAAmB,CAAC,aAAa,GAAG,KAAK;IACvD,OAAO,oBAAY,CACjB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,0BAAmB,CAAC,mBAAmB,CAAC,EACxC,aAAa,CAAC,CAAC;AACnB,CAAC;AALD,kDAKC"}
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,oDAAsC;AACtC,wDAA0C;AAC1C,0EAAgD;AAChD,2CAA6B;AAE7B,iCAAyD;AAE5C,QAAA,YAAY,GAAG,UAC1B,UAAkB,EAClB,SAAiB,EACjB,aAAa,GAAG,KAAK;IAErB,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IACD,OAAO,IAAI,MAAM,CAAC,MAAM,CAAC;QACvB,IAAI,EAAE,UAAU;QAChB,OAAO,EAAE,SAAS,CAAC,SAAS,CAAC;QAC7B,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CAAC;AACL,CAAC,CAAC;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,kDAAkD;IAClD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,uFAAuF;AACvF,oFAAoF;AACpF,+CAA+C;AAC/C,SAAgB,mBAAmB,CAAC,aAAa,GAAG,KAAK;IACvD,OAAO,oBAAY,CACjB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,0BAAmB,CAAC,mBAAmB,CAAC,EACxC,aAAa,CACd,CAAC;AACJ,CAAC;AAND,kDAMC"}

lib/autobuild-action.js (generated)

@@ -14,11 +14,13 @@ const logging_1 = require("./logging");
const util = __importStar(require("./util"));
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
var _a, _b;
const status = failingLanguage !== undefined || cause !== undefined ? 'failure' : 'success';
const statusReportBase = await util.createStatusReportBase('autobuild', status, startedAt, (_a = cause) === null || _a === void 0 ? void 0 : _a.message, (_b = cause) === null || _b === void 0 ? void 0 : _b.stack);
const status = failingLanguage !== undefined || cause !== undefined
? "failure"
: "success";
const statusReportBase = await util.createStatusReportBase("autobuild", status, startedAt, (_a = cause) === null || _a === void 0 ? void 0 : _a.message, (_b = cause) === null || _b === void 0 ? void 0 : _b.stack);
const statusReport = {
...statusReportBase,
autobuild_languages: allLanguages.join(','),
autobuild_languages: allLanguages.join(","),
autobuild_failure: failingLanguage,
};
await util.sendStatusReport(statusReport);
@@ -29,10 +31,10 @@ async function run() {
let language = undefined;
try {
util.prepareLocalRunEnvironment();
if (!await util.sendStatusReport(await util.createStatusReportBase('autobuild', 'starting', startedAt), true)) {
if (!(await util.sendStatusReport(await util.createStatusReportBase("autobuild", "starting", startedAt), true))) {
return;
}
const config = await config_utils.getConfig(util.getRequiredEnvParam('RUNNER_TEMP'), logger);
const config = await config_utils.getConfig(util.getRequiredEnvParam("RUNNER_TEMP"), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
@@ -42,15 +44,15 @@ async function run() {
}
}
catch (error) {
core.setFailed("We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. " + error.message);
core.setFailed(`We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. ${error.message}`);
console.log(error);
await sendCompletedStatusReport(startedAt, language ? [language] : [], language, error);
return;
}
await sendCompletedStatusReport(startedAt, language ? [language] : []);
}
run().catch(e => {
core.setFailed("autobuild action failed. " + e);
run().catch((e) => {
core.setFailed(`autobuild action failed. ${e}`);
console.log(e);
});
//# sourceMappingURL=autobuild-action.js.map

lib/autobuild-action.js.map

@@ -1 +1 @@
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,6CAA+B;AAS/B,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GAAG,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;IAC5F,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CAAC,CAAC;IAChB,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YAC7G,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CAAC,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAAE,MAAM,CAAC,CAAC;QAC7F,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CAAC,yFAAyF,CAAC,CAAC;SAC5G;QACD,QAAQ,GAAG,sCAA0B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,wBAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KAEF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;QACxF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,6CAA+B;AAS/B,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GACV,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClD,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IACE,CAAC,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAC3B,MAAM,IAAI,CAAC,sBAAsB,CAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,EACrE,IAAI,CACL,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,EACvC,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,sCAA0B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,wBAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIAAmI,KAAK,CAAC,OAAO,EAAE,CACnJ,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,CACN,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;IAChB,IAAI,CAAC,SAAS,CAAC,6BAA6B,CAAC,EAAE,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

lib/autobuild.js (generated, 4 lines changed)

@@ -15,7 +15,9 @@ function determineAutobuildLanguage(config, logger) {
}
logger.debug(`Detected dominant traced language: ${language}`);
if (autobuildLanguages.length > 1) {
logger.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages.slice(1).join(' and ')}, you must replace this call with custom build steps.`);
logger.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages
.slice(1)
.join(" and ")}, you must replace this call with custom build steps.`);
}
return language;
}

lib/autobuild.js.map

@@ -1 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;AAAA,qCAAqC;AAErC,2CAAyD;AAGzD,SAAgB,0BAA0B,CACxC,MAA2B,EAC3B,MAAc;IAGd,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;IACrE,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;IAEvC,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;QAC/E,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;IAE/D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QACjC,MAAM,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,uDAAuD,CAAC,CAAC;KAC5L;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AAxBD,gEAwBC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"}
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;AAAA,qCAAqC;AAErC,2CAAyD;AAGzD,SAAgB,0BAA0B,CACxC,MAA2B,EAC3B,MAAc;IAEd,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;IACrE,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;IAEvC,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;IAE/D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QACjC,MAAM,CAAC,OAAO,CACZ,oCAAoC,QAAQ,8BAA8B,kBAAkB;aACzF,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CAAC,OAAO,CAAC,uDAAuD,CACxE,CAAC;KACH;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AA7BD,gEA6BC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"}

lib/codeql.js (generated, 230 lines changed)

@@ -31,7 +31,7 @@ const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
const CODEQL_BUNDLE_NAME = "codeql-bundle.tar.gz";
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
function getCodeQLActionRepository(mode) {
if (mode !== 'actions') {
if (mode !== "actions") {
return CODEQL_DEFAULT_ACTION_REPOSITORY;
}
// Actions do not know their own repository name,
@@ -42,11 +42,12 @@ function getCodeQLActionRepository(mode) {
const relativeScriptPath = path.relative(actionsDirectory, __filename);
// This handles the case where the Action does not come from an Action repository,
// e.g. our integration tests which use the Action code from the current checkout.
if (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath)) {
if (relativeScriptPath.startsWith("..") ||
path.isAbsolute(relativeScriptPath)) {
return CODEQL_DEFAULT_ACTION_REPOSITORY;
}
const relativeScriptPathParts = relativeScriptPath.split(path.sep);
return relativeScriptPathParts[0] + "/" + relativeScriptPathParts[1];
return `${relativeScriptPathParts[0]}/${relativeScriptPathParts[1]}`;
}
async function getCodeQLBundleDownloadURL(githubAuth, githubUrl, mode, logger) {
const codeQLActionRepository = getCodeQLActionRepository(mode);
@@ -61,20 +62,23 @@ async function getCodeQLBundleDownloadURL(githubAuth, githubUrl, mode, logger) {
// We now filter out any duplicates.
// Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
const uniqueDownloadSources = potentialDownloadSources.filter((url, index, self) => index === self.indexOf(url));
for (let downloadSource of uniqueDownloadSources) {
let [apiURL, repository] = downloadSource;
for (const downloadSource of uniqueDownloadSources) {
const [apiURL, repository] = downloadSource;
// If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
if (apiURL === util.GITHUB_DOTCOM_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
if (apiURL === util.GITHUB_DOTCOM_URL &&
repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
break;
}
let [repositoryOwner, repositoryName] = repository.split("/");
const [repositoryOwner, repositoryName] = repository.split("/");
try {
const release = await api.getApiClient(githubAuth, githubUrl).repos.getReleaseByTag({
const release = await api
.getApiClient(githubAuth, githubUrl)
.repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: CODEQL_BUNDLE_VERSION
tag: CODEQL_BUNDLE_VERSION,
});
for (let asset of release.data.assets) {
for (const asset of release.data.assets) {
if (asset.name === CODEQL_BUNDLE_NAME) {
logger.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
return asset.url;
@@ -90,7 +94,7 @@ async function getCodeQLBundleDownloadURL(githubAuth, githubUrl, mode, logger) {
// We have to download CodeQL manually because the toolcache doesn't support Accept headers.
// This can be removed once https://github.com/actions/toolkit/pull/530 is merged and released.
async function toolcacheDownloadTool(url, headers, tempDir, logger) {
const client = new http.HttpClient('CodeQL Action');
const client = new http.HttpClient("CodeQL Action");
const dest = path.join(tempDir, v4_1.default());
const response = await client.get(url, headers);
if (response.message.statusCode !== 200) {
@@ -106,11 +110,11 @@ async function setupCodeQL(codeqlURL, githubAuth, githubUrl, tempDir, toolsDir,
// Setting these two env vars makes the toolcache code safe to use outside,
// of actions but this is obviously not a great thing we're doing and it would
// be better to write our own implementation to use outside of actions.
process.env['RUNNER_TEMP'] = tempDir;
process.env['RUNNER_TOOL_CACHE'] = toolsDir;
process.env["RUNNER_TEMP"] = tempDir;
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
try {
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`, logger);
let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
let codeqlFolder = toolcache.find("CodeQL", codeqlURLVersion);
if (codeqlFolder) {
logger.debug(`CodeQL found in cache ${codeqlFolder}`);
}
@@ -118,29 +122,29 @@ async function setupCodeQL(codeqlURL, githubAuth, githubUrl, tempDir, toolsDir,
if (!codeqlURL) {
codeqlURL = await getCodeQLBundleDownloadURL(githubAuth, githubUrl, mode, logger);
}
const headers = { accept: 'application/octet-stream' };
const headers = { accept: "application/octet-stream" };
// We only want to provide an authorization header if we are downloading
// from the same GitHub instance the Action is running on.
// This avoids leaking Enterprise tokens to dotcom.
if (codeqlURL.startsWith(githubUrl + "/")) {
logger.debug('Downloading CodeQL bundle with token.');
if (codeqlURL.startsWith(`${githubUrl}/`)) {
logger.debug("Downloading CodeQL bundle with token.");
headers.authorization = `token ${githubAuth}`;
}
else {
logger.debug('Downloading CodeQL bundle without token.');
logger.debug("Downloading CodeQL bundle without token.");
}
logger.info(`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`);
let codeqlPath = await toolcacheDownloadTool(codeqlURL, headers, tempDir, logger);
const codeqlPath = await toolcacheDownloadTool(codeqlURL, headers, tempDir, logger);
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLVersion);
}
let codeqlCmd = path.join(codeqlFolder, 'codeql', 'codeql');
if (process.platform === 'win32') {
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
if (process.platform === "win32") {
codeqlCmd += ".exe";
}
else if (process.platform !== 'linux' && process.platform !== 'darwin') {
throw new Error("Unsupported platform: " + process.platform);
else if (process.platform !== "linux" && process.platform !== "darwin") {
throw new Error(`Unsupported platform: ${process.platform}`);
}
cachedCodeQL = getCodeQLForCmd(codeqlCmd);
return cachedCodeQL;
@@ -159,7 +163,7 @@ function getCodeQLURLVersion(url, logger) {
let version = match[1];
if (!semver.valid(version)) {
logger.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
version = '0.0.0-' + version;
version = `0.0.0-${version}`;
}
const s = semver.clean(version);
if (!s) {
@@ -179,12 +183,12 @@ function getCodeQL(cmd) {
}
exports.getCodeQL = getCodeQL;
function resolveFunction(partialCodeql, methodName, defaultImplementation) {
if (typeof partialCodeql[methodName] !== 'function') {
if (typeof partialCodeql[methodName] !== "function") {
if (defaultImplementation !== undefined) {
return defaultImplementation;
}
const dummyMethod = () => {
throw new Error('CodeQL ' + methodName + ' method not correctly defined');
throw new Error(`CodeQL ${methodName} method not correctly defined`);
};
return dummyMethod;
}
@@ -198,15 +202,15 @@ function resolveFunction(partialCodeql, methodName, defaultImplementation) {
*/
function setCodeQL(partialCodeql) {
cachedCodeQL = {
getPath: resolveFunction(partialCodeql, 'getPath', () => '/tmp/dummy-path'),
printVersion: resolveFunction(partialCodeql, 'printVersion'),
getTracerEnv: resolveFunction(partialCodeql, 'getTracerEnv'),
databaseInit: resolveFunction(partialCodeql, 'databaseInit'),
runAutobuild: resolveFunction(partialCodeql, 'runAutobuild'),
extractScannedLanguage: resolveFunction(partialCodeql, 'extractScannedLanguage'),
finalizeDatabase: resolveFunction(partialCodeql, 'finalizeDatabase'),
resolveQueries: resolveFunction(partialCodeql, 'resolveQueries'),
databaseAnalyze: resolveFunction(partialCodeql, 'databaseAnalyze')
getPath: resolveFunction(partialCodeql, "getPath", () => "/tmp/dummy-path"),
printVersion: resolveFunction(partialCodeql, "printVersion"),
getTracerEnv: resolveFunction(partialCodeql, "getTracerEnv"),
databaseInit: resolveFunction(partialCodeql, "databaseInit"),
runAutobuild: resolveFunction(partialCodeql, "runAutobuild"),
extractScannedLanguage: resolveFunction(partialCodeql, "extractScannedLanguage"),
finalizeDatabase: resolveFunction(partialCodeql, "finalizeDatabase"),
resolveQueries: resolveFunction(partialCodeql, "resolveQueries"),
databaseAnalyze: resolveFunction(partialCodeql, "databaseAnalyze"),
};
return cachedCodeQL;
}
@@ -220,25 +224,25 @@ exports.setCodeQL = setCodeQL;
function getCachedCodeQL() {
if (cachedCodeQL === undefined) {
// Should never happen as setCodeQL is called by testing-utils.setupTests
throw new Error('cachedCodeQL undefined');
throw new Error("cachedCodeQL undefined");
}
return cachedCodeQL;
}
exports.getCachedCodeQL = getCachedCodeQL;
function getCodeQLForCmd(cmd) {
return {
getPath: function () {
getPath() {
return cmd;
},
printVersion: async function () {
async printVersion() {
await new toolrunnner.ToolRunner(cmd, [
'version',
'--format=json'
"version",
"--format=json",
]).exec();
},
getTracerEnv: async function (databasePath) {
async getTracerEnv(databasePath) {
// Write tracer-env.js to a temp location.
const tracerEnvJs = path.resolve(databasePath, 'working', 'tracer-env.js');
const tracerEnvJs = path.resolve(databasePath, "working", "tracer-env.js");
fs.mkdirSync(path.dirname(tracerEnvJs), { recursive: true });
fs.writeFileSync(tracerEnvJs, `
const fs = require('fs');
@@ -252,119 +256,127 @@ function getCodeQLForCmd(cmd) {
}
process.stdout.write(process.argv[2]);
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');`);
const envFile = path.resolve(databasePath, 'working', 'env.tmp');
const envFile = path.resolve(databasePath, "working", "env.tmp");
await new toolrunnner.ToolRunner(cmd, [
'database',
'trace-command',
"database",
"trace-command",
databasePath,
...getExtraOptionsFromEnv(['database', 'trace-command']),
...getExtraOptionsFromEnv(["database", "trace-command"]),
process.execPath,
tracerEnvJs,
envFile
envFile,
]).exec();
return JSON.parse(fs.readFileSync(envFile, 'utf-8'));
return JSON.parse(fs.readFileSync(envFile, "utf-8"));
},
databaseInit: async function (databasePath, language, sourceRoot) {
async databaseInit(databasePath, language, sourceRoot) {
await new toolrunnner.ToolRunner(cmd, [
'database',
'init',
"database",
"init",
databasePath,
'--language=' + language,
'--source-root=' + sourceRoot,
...getExtraOptionsFromEnv(['database', 'init']),
`--language=${language}`,
`--source-root=${sourceRoot}`,
...getExtraOptionsFromEnv(["database", "init"]),
]).exec();
},
runAutobuild: async function (language) {
const cmdName = process.platform === 'win32' ? 'autobuild.cmd' : 'autobuild.sh';
const autobuildCmd = path.join(path.dirname(cmd), language, 'tools', cmdName);
async runAutobuild(language) {
const cmdName = process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh";
const autobuildCmd = path.join(path.dirname(cmd), language, "tools", cmdName);
// Update JAVA_TOOL_OPTIONS to contain '-Dhttp.keepAlive=false'
// This is because of an issue with Azure pipelines timing out connections after 4 minutes
// and Maven not properly handling closed connections
// Otherwise long build processes will timeout when pulling down Java packages
// https://developercommunity.visualstudio.com/content/problem/292284/maven-hosted-agent-connection-timeout.html
let javaToolOptions = process.env['JAVA_TOOL_OPTIONS'] || "";
process.env['JAVA_TOOL_OPTIONS'] = [...javaToolOptions.split(/\s+/), '-Dhttp.keepAlive=false', '-Dmaven.wagon.http.pool=false'].join(' ');
const javaToolOptions = process.env["JAVA_TOOL_OPTIONS"] || "";
process.env["JAVA_TOOL_OPTIONS"] = [
...javaToolOptions.split(/\s+/),
"-Dhttp.keepAlive=false",
"-Dmaven.wagon.http.pool=false",
].join(" ");
await new toolrunnner.ToolRunner(autobuildCmd).exec();
},
extractScannedLanguage: async function (databasePath, language) {
async extractScannedLanguage(databasePath, language) {
// Get extractor location
let extractorPath = '';
let extractorPath = "";
await new toolrunnner.ToolRunner(cmd, [
'resolve',
'extractor',
'--format=json',
'--language=' + language,
...getExtraOptionsFromEnv(['resolve', 'extractor']),
"resolve",
"extractor",
"--format=json",
`--language=${language}`,
...getExtraOptionsFromEnv(["resolve", "extractor"]),
], {
silent: true,
listeners: {
stdout: (data) => { extractorPath += data.toString(); },
stderr: (data) => { process.stderr.write(data); }
}
stdout: (data) => {
extractorPath += data.toString();
},
stderr: (data) => {
process.stderr.write(data);
},
},
}).exec();
// Set trace command
const ext = process.platform === 'win32' ? '.cmd' : '.sh';
const traceCommand = path.resolve(JSON.parse(extractorPath), 'tools', 'autobuild' + ext);
const ext = process.platform === "win32" ? ".cmd" : ".sh";
const traceCommand = path.resolve(JSON.parse(extractorPath), "tools", `autobuild${ext}`);
// Run trace command
await new toolrunnner.ToolRunner(cmd, [
'database',
'trace-command',
...getExtraOptionsFromEnv(['database', 'trace-command']),
"database",
"trace-command",
...getExtraOptionsFromEnv(["database", "trace-command"]),
databasePath,
'--',
traceCommand
"--",
traceCommand,
]).exec();
},
finalizeDatabase: async function (databasePath) {
async finalizeDatabase(databasePath) {
await new toolrunnner.ToolRunner(cmd, [
'database',
'finalize',
...getExtraOptionsFromEnv(['database', 'finalize']),
databasePath
"database",
"finalize",
...getExtraOptionsFromEnv(["database", "finalize"]),
databasePath,
]).exec();
},
resolveQueries: async function (queries, extraSearchPath) {
async resolveQueries(queries, extraSearchPath) {
const codeqlArgs = [
'resolve',
'queries',
"resolve",
"queries",
...queries,
'--format=bylanguage',
...getExtraOptionsFromEnv(['resolve', 'queries'])
"--format=bylanguage",
...getExtraOptionsFromEnv(["resolve", "queries"]),
];
if (extraSearchPath !== undefined) {
codeqlArgs.push('--search-path', extraSearchPath);
codeqlArgs.push("--search-path", extraSearchPath);
}
let output = '';
let output = "";
await new toolrunnner.ToolRunner(cmd, codeqlArgs, {
listeners: {
stdout: (data) => {
output += data.toString();
}
}
},
},
}).exec();
return JSON.parse(output);
},
databaseAnalyze: async function (databasePath, sarifFile, querySuite, memoryFlag, addSnippetsFlag, threadsFlag) {
async databaseAnalyze(databasePath, sarifFile, querySuite, memoryFlag, addSnippetsFlag, threadsFlag) {
await new toolrunnner.ToolRunner(cmd, [
'database',
'analyze',
"database",
"analyze",
memoryFlag,
threadsFlag,
databasePath,
'--format=sarif-latest',
'--output=' + sarifFile,
"--format=sarif-latest",
`--output=${sarifFile}`,
addSnippetsFlag,
...getExtraOptionsFromEnv(['database', 'analyze']),
querySuite
...getExtraOptionsFromEnv(["database", "analyze"]),
querySuite,
]).exec();
}
},
};
}
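For reference, the JAVA_TOOL_OPTIONS handling in the autobuild step above simply appends the two keep-alive flags to whatever was already set. A minimal sketch, using "-Xmx2g" purely as an illustrative pre-existing value:

// Effect of the JAVA_TOOL_OPTIONS merge in the autobuild step above,
// for an illustrative pre-existing value of the variable:
process.env["JAVA_TOOL_OPTIONS"] = "-Xmx2g";
const merged = [
    ...(process.env["JAVA_TOOL_OPTIONS"] || "").split(/\s+/),
    "-Dhttp.keepAlive=false",
    "-Dmaven.wagon.http.pool=false",
].join(" ");
// merged === "-Xmx2g -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false"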
/**
* Gets the options for `path` of `options` as an array of extra option strings.
*/
function getExtraOptionsFromEnv(path) {
let options = util.getExtraOptionsEnvParam();
const options = util.getExtraOptionsEnvParam();
return getExtraOptions(options, path, []);
}
/**
@@ -385,22 +397,22 @@ function getExtraOptions(options, path, pathInfo) {
return [];
}
if (!Array.isArray(options)) {
const msg = `The extra options for '${pathInfo.join('.')}' ('${JSON.stringify(options)}') are not in an array.`;
const msg = `The extra options for '${pathInfo.join(".")}' ('${JSON.stringify(options)}') are not in an array.`;
throw new Error(msg);
}
return options.map(o => {
return options.map((o) => {
const t = typeof o;
if (t !== 'string' && t !== 'number' && t !== 'boolean') {
const msg = `The extra option for '${pathInfo.join('.')}' ('${JSON.stringify(o)}') is not a primitive value.`;
if (t !== "string" && t !== "number" && t !== "boolean") {
const msg = `The extra option for '${pathInfo.join(".")}' ('${JSON.stringify(o)}') is not a primitive value.`;
throw new Error(msg);
}
return o + '';
return `${o}`;
});
}
let all = asExtraOptions((_a = options) === null || _a === void 0 ? void 0 : _a['*'], pathInfo.concat('*'));
let specific = path.length === 0 ?
asExtraOptions(options, pathInfo) :
getExtraOptions((_b = options) === null || _b === void 0 ? void 0 : _b[path[0]], (_c = path) === null || _c === void 0 ? void 0 : _c.slice(1), pathInfo.concat(path[0]));
const all = asExtraOptions((_a = options) === null || _a === void 0 ? void 0 : _a["*"], pathInfo.concat("*"));
const specific = path.length === 0
? asExtraOptions(options, pathInfo)
: getExtraOptions((_b = options) === null || _b === void 0 ? void 0 : _b[path[0]], (_c = path) === null || _c === void 0 ? void 0 : _c.slice(1), pathInfo.concat(path[0]));
return all.concat(specific);
}
exports.getExtraOptions = getExtraOptions;
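The wildcard merging that getExtraOptions implements is easiest to see with a concrete options object. This is a sketch only: it assumes the compiled module is importable as ./codeql, and the options shown stand in for what util.getExtraOptionsEnvParam() would return from the extra-options environment variable; the flag values are illustrative, not taken from this diff.

const { getExtraOptions } = require("./codeql");

const options = {
    "*": ["--verbose"],            // applied to every codeql command
    database: {
        "*": ["--threads=2"],      // applied to every `codeql database ...` command
        analyze: ["--ram=4000"],   // applied only to `codeql database analyze`
    },
};

getExtraOptions(options, ["database", "analyze"], []);
// -> ["--verbose", "--threads=2", "--ram=4000"]
// Wildcard entries at each level are emitted before the more specific ones.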

File diff suppressed because one or more lines are too long

62
lib/codeql.test.js generated

@@ -19,29 +19,29 @@ const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default('download codeql bundle cache', async (t) => {
ava_1.default("download codeql bundle cache", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const versions = ['20200601', '20200610'];
const versions = ["20200601", "20200610"];
for (let i = 0; i < versions.length; i++) {
const version = versions[i];
nock_1.default('https://example.com')
nock_1.default("https://example.com")
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, 'token', 'https://github.example.com', tmpDir, tmpDir, 'runner', logging_1.getRunnerLogger(true));
t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, "token", "https://github.example.com", tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
}
const cachedVersions = toolcache.findAllVersions('CodeQL');
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
});
ava_1.default('parse codeql bundle url version', t => {
ava_1.default("parse codeql bundle url version", (t) => {
const tests = {
'20200601': '0.0.0-20200601',
'20200601.0': '0.0.0-20200601.0',
'20200601.0.0': '20200601.0.0',
'1.2.3': '1.2.3',
'1.2.3-alpha': '1.2.3-alpha',
'1.2.3-beta.1': '1.2.3-beta.1',
"20200601": "0.0.0-20200601",
"20200601.0": "0.0.0-20200601.0",
"20200601.0.0": "20200601.0.0",
"1.2.3": "1.2.3",
"1.2.3-alpha": "1.2.3-alpha",
"1.2.3-beta.1": "1.2.3-beta.1",
};
for (const [version, expectedVersion] of Object.entries(tests)) {
const url = `https://github.com/.../codeql-bundle-${version}/...`;
@@ -54,26 +54,26 @@ ava_1.default('parse codeql bundle url version', t => {
}
}
});
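The table in the "parse codeql bundle url version" test above encodes a simple rule: bundle tags that are not already valid semver are wrapped as a 0.0.0 prerelease so the toolcache can still order them. A minimal restatement of that rule, assuming the semver package is available (this is not the module's own implementation):

const semver = require("semver");

// Sketch of the mapping the test table above expects.
const toToolcacheVersion = (tag) =>
    semver.valid(tag) === null ? `0.0.0-${tag}` : tag;

toToolcacheVersion("20200601");     // "0.0.0-20200601"
toToolcacheVersion("20200601.0.0"); // "20200601.0.0"  -- already valid semver
toToolcacheVersion("1.2.3-beta.1"); // "1.2.3-beta.1"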
ava_1.default('getExtraOptions works for explicit paths', t => {
t.deepEqual(codeql.getExtraOptions({}, ['foo'], []), []);
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ['foo'], []), ['42']);
t.deepEqual(codeql.getExtraOptions({ foo: { bar: [42] } }, ['foo', 'bar'], []), ['42']);
ava_1.default("getExtraOptions works for explicit paths", (t) => {
t.deepEqual(codeql.getExtraOptions({}, ["foo"], []), []);
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ["foo"], []), ["42"]);
t.deepEqual(codeql.getExtraOptions({ foo: { bar: [42] } }, ["foo", "bar"], []), ["42"]);
});
ava_1.default('getExtraOptions works for wildcards', t => {
t.deepEqual(codeql.getExtraOptions({ '*': [42] }, ['foo'], []), ['42']);
ava_1.default("getExtraOptions works for wildcards", (t) => {
t.deepEqual(codeql.getExtraOptions({ "*": [42] }, ["foo"], []), ["42"]);
});
ava_1.default('getExtraOptions works for wildcards and explicit paths', t => {
let o1 = { '*': [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o1, ['foo'], []), ['42', '87']);
let o2 = { '*': [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o2, ['foo', 'bar'], []), ['42']);
let o3 = { '*': [42], foo: { '*': [87], bar: [99] } };
let p = ['foo', 'bar'];
t.deepEqual(codeql.getExtraOptions(o3, p, []), ['42', '87', '99']);
ava_1.default("getExtraOptions works for wildcards and explicit paths", (t) => {
const o1 = { "*": [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o1, ["foo"], []), ["42", "87"]);
const o2 = { "*": [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o2, ["foo", "bar"], []), ["42"]);
const o3 = { "*": [42], foo: { "*": [87], bar: [99] } };
const p = ["foo", "bar"];
t.deepEqual(codeql.getExtraOptions(o3, p, []), ["42", "87", "99"]);
});
ava_1.default('getExtraOptions throws for bad content', t => {
t.throws(() => codeql.getExtraOptions({ '*': 42 }, ['foo'], []));
t.throws(() => codeql.getExtraOptions({ foo: 87 }, ['foo'], []));
t.throws(() => codeql.getExtraOptions({ '*': [42], foo: { '*': 87, bar: [99] } }, ['foo', 'bar'], []));
ava_1.default("getExtraOptions throws for bad content", (t) => {
t.throws(() => codeql.getExtraOptions({ "*": 42 }, ["foo"], []));
t.throws(() => codeql.getExtraOptions({ foo: 87 }, ["foo"], []));
t.throws(() => codeql.getExtraOptions({ "*": [42], foo: { "*": 87, bar: [99] } }, ["foo", "bar"], []));
});
//# sourceMappingURL=codeql.test.js.map


@@ -1 +1 @@
{"version":3,"file":"codeql.test.js","sourceRoot":"","sources":["../src/codeql.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,iDAAmC;AACnC,uCAA4C;AAC5C,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACnC,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAErF,MAAM,MAAM,CAAC,WAAW,CACtB,8CAA8C,OAAO,uBAAuB,EAC5E,OAAO,EACP,4BAA4B,EAC5B,MAAM,EACN,MAAM,EACN,QAAQ,EACR,yBAAe,CAAC,IAAI,CAAC,CAAC,CAAC;YAEzB,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,MAAM,CAAC,mBAAmB,CAAC,GAAG,EAAE,yBAAe,CAAC,IAAI,CAAC,CAAC,CAAC;YAC7E,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,0CAA0C,EAAE,CAAC,CAAC,EAAE;IACnD,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;IAEzD,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;IAEtE,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAC,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;AACtF,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,qCAAqC,EAAE,CAAC,CAAC,EAAE;IAC9C,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;AACxE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,wDAAwD,EAAE,CAAC,CAAC,EAAE;IACjE,IAAI,EAAE,GAAG,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,CAAC;IAChC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;IAEnE,IAAI,EAAE,GAAG,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,CAAC;IAChC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;IAEpE,IAAI,EAAE,GAAG,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAC,CAAC;IACnD,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;IACvB,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;AACrE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,wCAAwC,EAAE,CAAC,CAAC,EAAE;IACjD,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,EAAE,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IAE/D,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,EAAE,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IAE/D,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAA
E,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,EAAE,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAC,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;AACtG,CAAC,CAAC,CAAC"}
{"version":3,"file":"codeql.test.js","sourceRoot":"","sources":["../src/codeql.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,iDAAmC;AACnC,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CACZ,GAAG,EACH,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAC9D,CAAC;YAEJ,MAAM,MAAM,CAAC,WAAW,CACtB,8CAA8C,OAAO,uBAAuB,EAC5E,OAAO,EACP,4BAA4B,EAC5B,MAAM,EACN,MAAM,EACN,QAAQ,EACR,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;YAEF,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE,EAAE;IAC5C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,MAAM,CAAC,mBAAmB,CAC9C,GAAG,EACH,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;YACF,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,0CAA0C,EAAE,CAAC,CAAC,EAAE,EAAE;IACrD,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;IAEzD,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;IAExE,CAAC,CAAC,SAAS,CACT,MAAM,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,EAClE,CAAC,IAAI,CAAC,CACP,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,qCAAqC,EAAE,CAAC,CAAC,EAAE,EAAE;IAChD,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1E,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,wDAAwD,EAAE,CAAC,CAAC,EAAE,EAAE;IACnE,MAAM,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;IACpC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;IAEnE,MAAM,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;IACpC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;IAEpE,MAAM,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;IACxD,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;IACzB,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;AACrE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,wCAAwC,EAAE,CAAC,CAAC,EAAE,EAAE;IACnD,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,EAAE,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IAEjE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,EAAE,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IAEjE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CACZ,MAAM,CA
AC,eAAe,CACpB,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,EAAE,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAC1C,CAAC,KAAK,EAAE,KAAK,CAAC,EACd,EAAE,CACH,CACF,CAAC;AACJ,CAAC,CAAC,CAAC"}

191
lib/config-utils.js generated

@@ -14,12 +14,12 @@ const api = __importStar(require("./api-client"));
const externalQueries = __importStar(require("./external-queries"));
const languages_1 = require("./languages");
// Property names from the user-supplied config file.
const NAME_PROPERTY = 'name';
const DISABLE_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
const QUERIES_PROPERTY = 'queries';
const QUERIES_USES_PROPERTY = 'uses';
const PATHS_IGNORE_PROPERTY = 'paths-ignore';
const PATHS_PROPERTY = 'paths';
const NAME_PROPERTY = "name";
const DISABLE_DEFAULT_QUERIES_PROPERTY = "disable-default-queries";
const QUERIES_PROPERTY = "queries";
const QUERIES_USES_PROPERTY = "uses";
const PATHS_IGNORE_PROPERTY = "paths-ignore";
const PATHS_PROPERTY = "paths";
/**
* A list of queries from https://github.com/github/codeql that
* we don't want to run. Disabling them here is a quicker alternative to
@@ -30,14 +30,13 @@ const PATHS_PROPERTY = 'paths';
* Format is a map from language to an array of path suffixes of .ql files.
*/
const DISABLED_BUILTIN_QUERIES = {
'csharp': [
'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
]
csharp: [
"ql/src/Security Features/CWE-937/VulnerablePackage.ql",
"ql/src/Security Features/CWE-451/MissingXFrameOptions.ql",
],
};
function queryIsDisabled(language, query) {
return (DISABLED_BUILTIN_QUERIES[language] || [])
.some(disabledQuery => query.endsWith(disabledQuery));
return (DISABLED_BUILTIN_QUERIES[language] || []).some((disabledQuery) => query.endsWith(disabledQuery));
}
/**
* Asserts that the noDeclaredLanguage and multipleDeclaredLanguages fields are
@@ -47,16 +46,14 @@ function validateQueries(resolvedQueries) {
const noDeclaredLanguage = resolvedQueries.noDeclaredLanguage;
const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
if (noDeclaredLanguageQueries.length !== 0) {
throw new Error('The following queries do not declare a language. ' +
'Their qlpack.yml files are either missing or is invalid.\n' +
noDeclaredLanguageQueries.join('\n'));
throw new Error(`${"The following queries do not declare a language. " +
"Their qlpack.yml files are either missing or is invalid.\n"}${noDeclaredLanguageQueries.join("\n")}`);
}
const multipleDeclaredLanguages = resolvedQueries.multipleDeclaredLanguages;
const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
if (multipleDeclaredLanguagesQueries.length !== 0) {
throw new Error('The following queries declare multiple languages. ' +
'Their qlpack.yml files are either missing or is invalid.\n' +
multipleDeclaredLanguagesQueries.join('\n'));
throw new Error(`${"The following queries declare multiple languages. " +
"Their qlpack.yml files are either missing or is invalid.\n"}${multipleDeclaredLanguagesQueries.join("\n")}`);
}
}
/**
@@ -68,7 +65,7 @@ async function runResolveQueries(codeQL, resultMap, toResolve, extraSearchPath,
if (resultMap[language] === undefined) {
resultMap[language] = [];
}
resultMap[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
resultMap[language].push(...Object.keys(queries).filter((q) => !queryIsDisabled(language, q)));
}
if (errorOnInvalidQueries) {
validateQueries(resolvedQueries);
@@ -78,11 +75,11 @@ async function runResolveQueries(codeQL, resultMap, toResolve, extraSearchPath,
* Get the set of queries included by default.
*/
async function addDefaultQueries(codeQL, languages, resultMap) {
const suites = languages.map(l => l + '-code-scanning.qls');
const suites = languages.map((l) => `${l}-code-scanning.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined, false);
}
// The set of acceptable values for built-in suites from the codeql bundle
const builtinSuites = ['security-extended', 'security-and-quality'];
const builtinSuites = ["security-extended", "security-and-quality"];
/**
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
* Throws an error if suiteName is not a valid builtin suite.
@@ -92,7 +89,7 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, suiteName, c
if (!suite) {
throw new Error(getQueryUsesInvalid(configFile, suiteName));
}
const suites = languages.map(l => l + '-' + suiteName + '.qls');
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined, false);
}
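addDefaultQueries and addBuiltinSuiteQueries above both just derive per-language .qls suite names and hand them to runResolveQueries. For an illustrative language list they produce:

// Suite names resolved for languages ["javascript", "cpp"] (illustrative list):
["javascript", "cpp"].map((l) => `${l}-code-scanning.qls`);
// -> ["javascript-code-scanning.qls", "cpp-code-scanning.qls"]          (default queries)
["javascript", "cpp"].map((l) => `${l}-security-extended.qls`);
// -> ["javascript-security-extended.qls", "cpp-security-extended.qls"]  (queries: security-extended)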
/**
@@ -118,12 +115,12 @@ async function addLocalQueries(codeQL, resultMap, localQueryPath, checkoutPath,
* Retrieve the set of queries at the referenced remote repo and add them to resultMap.
*/
async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, logger, configFile) {
let tok = queryUses.split('@');
let tok = queryUses.split("@");
if (tok.length !== 2) {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
const ref = tok[1];
tok = tok[0].split('/');
tok = tok[0].split("/");
// The first token is the owner
// The second token is the repo
// The rest is a path, if there is more than one token combine them to form the full path
@@ -131,14 +128,14 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
// Check none of the parts of the repository name are empty
if (tok[0].trim() === '' || tok[1].trim() === '') {
if (tok[0].trim() === "" || tok[1].trim() === "") {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
const nwo = tok[0] + '/' + tok[1];
const nwo = `${tok[0]}/${tok[1]}`;
// Checkout the external repository
const checkoutPath = await externalQueries.checkoutExternalRepository(nwo, ref, githubUrl, tempDir, logger);
const queryPath = tok.length > 2
? path.join(checkoutPath, tok.slice(2).join('/'))
? path.join(checkoutPath, tok.slice(2).join("/"))
: checkoutPath;
await runResolveQueries(codeQL, resultMap, [queryPath], checkoutPath, true);
}
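A concrete trace of the `uses:` reference parsing above may help; the reference value and the subPath name are illustrative only.

// Tracing addRemoteQueries' parsing for an example reference:
const queryUses = "octo-org/security-queries/suites/extra.qls@main";
let tok = queryUses.split("@");         // ["octo-org/security-queries/suites/extra.qls", "main"]
const ref = tok[1];                     // "main"                      -- ref to check out
tok = tok[0].split("/");                // ["octo-org", "security-queries", "suites", "extra.qls"]
const nwo = `${tok[0]}/${tok[1]}`;      // "octo-org/security-queries" -- repository to clone
const subPath = tok.slice(2).join("/"); // "suites/extra.qls"          -- joined onto the checkout path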
@@ -161,7 +158,7 @@ async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir,
return;
}
// Check for one of the builtin suites
if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
await addBuiltinSuiteQueries(languages, codeQL, resultMap, queryUses, configFile);
return;
}
@@ -181,35 +178,35 @@ function validateAndSanitisePath(originalPath, propertyName, configFile, logger)
// Take a copy so we don't modify the original path, so we can still construct error messages
let path = originalPath;
// All paths are relative to the src root, so strip off leading slashes.
while (path.charAt(0) === '/') {
while (path.charAt(0) === "/") {
path = path.substring(1);
}
// Trailing ** are redundant, so strip them off
if (path.endsWith('/**')) {
if (path.endsWith("/**")) {
path = path.substring(0, path.length - 2);
}
// An empty path is not allowed as it's meaningless
if (path === '') {
throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" is not an invalid path. ' +
'It is not necessary to include it, and it is not allowed to exclude it.'));
if (path === "") {
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" is not an invalid path. ` +
`It is not necessary to include it, and it is not allowed to exclude it.`));
}
// Check for illegal uses of **
if (path.match(pathStarsRegex)) {
throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an invalid "**" wildcard. ' +
'They must be immediately preceeded and followed by a slash as in "/**/", or come at the start or end.'));
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an invalid "**" wildcard. ` +
`They must be immediately preceeded and followed by a slash as in "/**/", or come at the start or end.`));
}
// Check for other regex characters that we don't support.
// Output a warning so the user knows, but otherwise continue normally.
if (path.match(filterPatternCharactersRegex)) {
logger.warning(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an unsupported character. ' +
'The filter pattern characters ?, +, [, ], ! are not supported and will be matched literally.'));
logger.warning(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an unsupported character. ` +
`The filter pattern characters ?, +, [, ], ! are not supported and will be matched literally.`));
}
// Ban any uses of backslash for now.
// This may not play nicely with project layouts.
// This restriction can be lifted later if we determine they are ok.
if (path.indexOf('\\') !== -1) {
throw new Error(getConfigFilePropertyError(configFile, propertyName, '"' + originalPath + '" contains an "\\" character. These are not allowed in filters. ' +
'If running on windows we recommend using "/" instead for path filters.'));
if (path.indexOf("\\") !== -1) {
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an "\\" character. These are not allowed in filters. ` +
`If running on windows we recommend using "/" instead for path filters.`));
}
return path;
}
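A few worked inputs for validateAndSanitisePath, following the rules spelled out in the comments above. This is a sketch: it assumes the compiled module is importable as ./config-utils and that logger is any object with a warning method; passing undefined for configFile means the value came from the workflow file.

const { validateAndSanitisePath } = require("./config-utils");
const logger = { warning: console.warn };

validateAndSanitisePath("/src/**", "paths", undefined, logger);
// -> "src/"              (leading "/" and trailing "**" stripped)
validateAndSanitisePath("src/**/generated", "paths", undefined, logger);
// -> "src/**/generated"  ("**" between slashes is allowed)
validateAndSanitisePath("src\\main", "paths", undefined, logger);
// throws: backslashes are rejected in filters
validateAndSanitisePath("/", "paths", undefined, logger);
// throws: strips down to the empty path, which is meaningless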
@@ -217,76 +214,74 @@ exports.validateAndSanitisePath = validateAndSanitisePath;
// An undefined configFile in some of these functions indicates that
// the property was in a workflow file, not a config file
function getNameInvalid(configFile) {
return getConfigFilePropertyError(configFile, NAME_PROPERTY, 'must be a non-empty string');
return getConfigFilePropertyError(configFile, NAME_PROPERTY, "must be a non-empty string");
}
exports.getNameInvalid = getNameInvalid;
function getDisableDefaultQueriesInvalid(configFile) {
return getConfigFilePropertyError(configFile, DISABLE_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
return getConfigFilePropertyError(configFile, DISABLE_DEFAULT_QUERIES_PROPERTY, "must be a boolean");
}
exports.getDisableDefaultQueriesInvalid = getDisableDefaultQueriesInvalid;
function getQueriesInvalid(configFile) {
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, 'must be an array');
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, "must be an array");
}
exports.getQueriesInvalid = getQueriesInvalid;
function getQueryUsesInvalid(configFile, queryUses) {
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'must be a built-in suite (' + builtinSuites.join(' or ') +
'), a relative path, or be of the form "owner/repo[/path]@ref"' +
(queryUses !== undefined ? '\n Found: ' + queryUses : ''));
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `must be a built-in suite (${builtinSuites.join(" or ")}), a relative path, or be of the form "owner/repo[/path]@ref"${queryUses !== undefined ? `\n Found: ${queryUses}` : ""}`);
}
exports.getQueryUsesInvalid = getQueryUsesInvalid;
function getPathsIgnoreInvalid(configFile) {
return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, 'must be an array of non-empty strings');
return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, "must be an array of non-empty strings");
}
exports.getPathsIgnoreInvalid = getPathsIgnoreInvalid;
function getPathsInvalid(configFile) {
return getConfigFilePropertyError(configFile, PATHS_PROPERTY, 'must be an array of non-empty strings');
return getConfigFilePropertyError(configFile, PATHS_PROPERTY, "must be an array of non-empty strings");
}
exports.getPathsInvalid = getPathsInvalid;
function getLocalPathOutsideOfRepository(configFile, localPath) {
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" is outside of the repository');
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `is invalid as the local path "${localPath}" is outside of the repository`);
}
exports.getLocalPathOutsideOfRepository = getLocalPathOutsideOfRepository;
function getLocalPathDoesNotExist(configFile, localPath) {
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY, 'is invalid as the local path "' + localPath + '" does not exist in the repository');
return getConfigFilePropertyError(configFile, `${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`, `is invalid as the local path "${localPath}" does not exist in the repository`);
}
exports.getLocalPathDoesNotExist = getLocalPathDoesNotExist;
function getConfigFileOutsideWorkspaceErrorMessage(configFile) {
return 'The configuration file "' + configFile + '" is outside of the workspace';
return `The configuration file "${configFile}" is outside of the workspace`;
}
exports.getConfigFileOutsideWorkspaceErrorMessage = getConfigFileOutsideWorkspaceErrorMessage;
function getConfigFileDoesNotExistErrorMessage(configFile) {
return 'The configuration file "' + configFile + '" does not exist';
return `The configuration file "${configFile}" does not exist`;
}
exports.getConfigFileDoesNotExistErrorMessage = getConfigFileDoesNotExistErrorMessage;
function getConfigFileRepoFormatInvalidMessage(configFile) {
let error = 'The configuration file "' + configFile + '" is not a supported remote file reference.';
error += ' Expected format <owner>/<repository>/<file-path>@<ref>';
let error = `The configuration file "${configFile}" is not a supported remote file reference.`;
error += " Expected format <owner>/<repository>/<file-path>@<ref>";
return error;
}
exports.getConfigFileRepoFormatInvalidMessage = getConfigFileRepoFormatInvalidMessage;
function getConfigFileFormatInvalidMessage(configFile) {
return 'The configuration file "' + configFile + '" could not be read';
return `The configuration file "${configFile}" could not be read`;
}
exports.getConfigFileFormatInvalidMessage = getConfigFileFormatInvalidMessage;
function getConfigFileDirectoryGivenMessage(configFile) {
return 'The configuration file "' + configFile + '" looks like a directory, not a file';
return `The configuration file "${configFile}" looks like a directory, not a file`;
}
exports.getConfigFileDirectoryGivenMessage = getConfigFileDirectoryGivenMessage;
function getConfigFilePropertyError(configFile, property, error) {
if (configFile === undefined) {
return 'The workflow property "' + property + '" is invalid: ' + error;
return `The workflow property "${property}" is invalid: ${error}`;
}
else {
return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
return `The configuration file "${configFile}" is invalid: property "${property}" ${error}`;
}
}
function getNoLanguagesError() {
return "Did not detect any languages to analyze. " +
"Please update input in workflow or check that GitHub detects the correct languages in your repository.";
return ("Did not detect any languages to analyze. " +
"Please update input in workflow or check that GitHub detects the correct languages in your repository.");
}
exports.getNoLanguagesError = getNoLanguagesError;
function getUnknownLanguagesError(languages) {
return "Did not recognise the following languages: " + languages.join(', ');
return `Did not recognise the following languages: ${languages.join(", ")}`;
}
exports.getUnknownLanguagesError = getUnknownLanguagesError;
/**
@@ -294,18 +289,20 @@ exports.getUnknownLanguagesError = getUnknownLanguagesError;
*/
async function getLanguagesInRepo(repository, githubAuth, githubUrl, logger) {
logger.debug(`GitHub repo ${repository.owner} ${repository.repo}`);
const response = await api.getApiClient(githubAuth, githubUrl, true).repos.listLanguages({
const response = await api
.getApiClient(githubAuth, githubUrl, true)
.repos.listLanguages({
owner: repository.owner,
repo: repository.repo
repo: repository.repo,
});
logger.debug("Languages API response: " + JSON.stringify(response));
logger.debug(`Languages API response: ${JSON.stringify(response)}`);
// The GitHub API is going to return languages in order of popularity,
// When we pick a language to autobuild we want to pick the most popular traced language
// Since sets in javascript maintain insertion order, using a set here and then splatting it
// into an array gives us an array of languages ordered by popularity
let languages = new Set();
for (let lang of Object.keys(response.data)) {
let parsedLang = languages_1.parseLanguage(lang);
const languages = new Set();
for (const lang of Object.keys(response.data)) {
const parsedLang = languages_1.parseLanguage(lang);
if (parsedLang !== undefined) {
languages.add(parsedLang);
}
@@ -325,14 +322,14 @@ async function getLanguagesInRepo(repository, githubAuth, githubUrl, logger) {
async function getLanguages(languagesInput, repository, githubAuth, githubUrl, logger) {
// Obtain from action input 'languages' if set
let languages = (languagesInput || "")
.split(',')
.map(x => x.trim())
.filter(x => x.length > 0);
logger.info("Languages from configuration: " + JSON.stringify(languages));
.split(",")
.map((x) => x.trim())
.filter((x) => x.length > 0);
logger.info(`Languages from configuration: ${JSON.stringify(languages)}`);
if (languages.length === 0) {
// Obtain languages as all languages in the repo that can be analysed
languages = await getLanguagesInRepo(repository, githubAuth, githubUrl, logger);
logger.info("Automatically detected languages: " + JSON.stringify(languages));
logger.info(`Automatically detected languages: ${JSON.stringify(languages)}`);
}
// If the languages parameter was not given and no languages were
// detected then fail here as this is a workflow configuration error.
@@ -342,7 +339,7 @@ async function getLanguages(languagesInput, repository, githubAuth, githubUrl, l
// Make sure they are supported
const parsedLanguages = [];
const unknownLanguages = [];
for (let language of languages) {
for (const language of languages) {
const parsedLanguage = languages_1.parseLanguage(language);
if (parsedLanguage === undefined) {
unknownLanguages.push(language);
@@ -359,8 +356,8 @@ async function getLanguages(languagesInput, repository, githubAuth, githubUrl, l
async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, checkoutPath, githubUrl, logger) {
queriesInput = queriesInput.trim();
// "+" means "don't override config file" - see shouldAddConfigFileQueries
queriesInput = queriesInput.replace(/^\+/, '');
for (const query of queriesInput.split(',')) {
queriesInput = queriesInput.replace(/^\+/, "");
for (const query of queriesInput.split(",")) {
await parseQueryUses(languages, codeQL, resultMap, query, tempDir, checkoutPath, githubUrl, logger);
}
}
@@ -370,7 +367,7 @@ async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap
// should instead be added in addition
function shouldAddConfigFileQueries(queriesInput) {
if (queriesInput) {
return queriesInput.trimStart().substr(0, 1) === '+';
return queriesInput.trimStart().substr(0, 1) === "+";
}
return true;
}
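The "+" convention that shouldAddConfigFileQueries implements comes down to three cases. The helper itself is internal to this module, so the sketch below inlines the same check rather than importing it:

// Minimal restatement of the rule above, not the module's export:
const addsToConfigFile = (queriesInput) =>
    queriesInput ? queriesInput.trimStart().startsWith("+") : true;

addsToConfigFile(undefined);            // true  -- no workflow input, keep config-file queries
addsToConfigFile("./my-queries");       // false -- workflow input replaces config-file queries
addsToConfigFile("+./extra1,./extra2"); // true  -- "+" means the input is added on top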
@@ -385,8 +382,8 @@ async function getDefaultConfig(languagesInput, queriesInput, repository, tempDi
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, githubUrl, logger);
}
return {
languages: languages,
queries: queries,
languages,
queries,
pathsIgnore: [],
paths: [],
originalUserInput: {},
@@ -440,12 +437,14 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
if (queriesInput) {
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, githubUrl, logger);
}
if (shouldAddConfigFileQueries(queriesInput) && QUERIES_PROPERTY in parsedYAML) {
if (shouldAddConfigFileQueries(queriesInput) &&
QUERIES_PROPERTY in parsedYAML) {
if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
throw new Error(getQueriesInvalid(configFile));
}
for (const query of parsedYAML[QUERIES_PROPERTY]) {
if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
if (!(QUERIES_USES_PROPERTY in query) ||
typeof query[QUERIES_USES_PROPERTY] !== "string") {
throw new Error(getQueryUsesInvalid(configFile));
}
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, checkoutPath, githubUrl, logger, configFile);
@@ -455,8 +454,8 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
throw new Error(getPathsIgnoreInvalid(configFile));
}
parsedYAML[PATHS_IGNORE_PROPERTY].forEach(path => {
if (typeof path !== "string" || path === '') {
parsedYAML[PATHS_IGNORE_PROPERTY].forEach((path) => {
if (typeof path !== "string" || path === "") {
throw new Error(getPathsIgnoreInvalid(configFile));
}
pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile, logger));
@@ -466,8 +465,8 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
throw new Error(getPathsInvalid(configFile));
}
parsedYAML[PATHS_PROPERTY].forEach(path => {
if (typeof path !== "string" || path === '') {
parsedYAML[PATHS_PROPERTY].forEach((path) => {
if (typeof path !== "string" || path === "") {
throw new Error(getPathsInvalid(configFile));
}
paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile, logger));
@@ -502,7 +501,7 @@ async function initConfig(languagesInput, queriesInput, configFile, repository,
let config;
// If no config file was provided create an empty one
if (!configFile) {
logger.debug('No configuration file was provided');
logger.debug("No configuration file was provided");
config = await getDefaultConfig(languagesInput, queriesInput, repository, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger);
}
else {
@@ -518,7 +517,7 @@ function isLocal(configPath) {
if (configPath.indexOf("./") === 0) {
return true;
}
return (configPath.indexOf("@") === -1);
return configPath.indexOf("@") === -1;
}
function getLocalConfig(configFile, checkoutPath) {
// Error if the config file is now outside of the workspace
@@ -529,17 +528,19 @@ function getLocalConfig(configFile, checkoutPath) {
if (!fs.existsSync(configFile)) {
throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
}
return yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
return yaml.safeLoad(fs.readFileSync(configFile, "utf8"));
}
async function getRemoteConfig(configFile, githubAuth, githubUrl) {
// retrieve the various parts of the config location, and ensure they're present
const format = new RegExp('(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)');
const format = new RegExp("(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)");
const pieces = format.exec(configFile);
// 5 = 4 groups + the whole expression
if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
}
const response = await api.getApiClient(githubAuth, githubUrl, true).repos.getContents({
const response = await api
.getApiClient(githubAuth, githubUrl, true)
.repos.getContents({
owner: pieces.groups.owner,
repo: pieces.groups.repo,
path: pieces.groups.path,
@@ -555,13 +556,13 @@ async function getRemoteConfig(configFile, githubAuth, githubUrl) {
else {
throw new Error(getConfigFileFormatInvalidMessage(configFile));
}
return yaml.safeLoad(Buffer.from(fileContents, 'base64').toString('binary'));
return yaml.safeLoad(Buffer.from(fileContents, "base64").toString("binary"));
}
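For reference, this is what the remote-config regular expression above extracts from a typical config-file reference; the reference value is illustrative.

const format = new RegExp("(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)");
const pieces = format.exec("octo-org/codeql-config/codeql-config.yml@main");
// pieces.groups ->
//   { owner: "octo-org", repo: "codeql-config",
//     path: "codeql-config.yml", ref: "main" }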
/**
* Get the file path where the parsed config will be stored.
*/
function getPathToParsedConfigFile(tempDir) {
return path.join(tempDir, 'config');
return path.join(tempDir, "config");
}
exports.getPathToParsedConfigFile = getPathToParsedConfigFile;
/**
@@ -571,8 +572,8 @@ async function saveConfig(config, logger) {
const configString = JSON.stringify(config);
const configFile = getPathToParsedConfigFile(config.tempDir);
fs.mkdirSync(path.dirname(configFile), { recursive: true });
fs.writeFileSync(configFile, configString, 'utf8');
logger.debug('Saved config:');
fs.writeFileSync(configFile, configString, "utf8");
logger.debug("Saved config:");
logger.debug(configString);
}
/**
@@ -584,8 +585,8 @@ async function getConfig(tempDir, logger) {
if (!fs.existsSync(configFile)) {
return undefined;
}
const configString = fs.readFileSync(configFile, 'utf8');
logger.debug('Loaded config:');
const configString = fs.readFileSync(configFile, "utf8");
logger.debug("Loaded config:");
logger.debug(configString);
return JSON.parse(configString);
}
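The parsed config round-trips through a single file under the temp directory. A small sketch, assuming the exported helper is importable and using an illustrative runner path:

const { getPathToParsedConfigFile } = require("./config-utils");

getPathToParsedConfigFile("/home/runner/work/_temp");
// -> "/home/runner/work/_temp/config"
// saveConfig writes JSON.stringify(config) to this file; getConfig reads and
// parses it back, or returns undefined if it has not been written yet.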

File diff suppressed because one or more lines are too long

335
lib/config-utils.test.js generated

@@ -25,23 +25,25 @@ const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
// Returns the filepath of the newly-created file
function createConfigFile(inputFileContents, tmpDir) {
const configFilePath = path.join(tmpDir, 'input');
fs.writeFileSync(configFilePath, inputFileContents, 'utf8');
const configFilePath = path.join(tmpDir, "input");
fs.writeFileSync(configFilePath, inputFileContents, "utf8");
return configFilePath;
}
function mockGetContents(content) {
// Passing an auth token is required, so we just use a dummy value
let client = new github.GitHub('123');
const client = new github.GitHub("123");
const response = {
data: content
data: content,
};
const spyGetContents = sinon_1.default.stub(client.repos, "getContents").resolves(response);
const spyGetContents = sinon_1.default
.stub(client.repos, "getContents")
.resolves(response);
sinon_1.default.stub(api, "getApiClient").value(() => client);
return spyGetContents;
}
function mockListLanguages(languages) {
// Passing an auth token is required, so we just use a dummy value
let client = new github.GitHub('123');
const client = new github.GitHub("123");
const response = {
data: {},
};
@@ -54,9 +56,9 @@ function mockListLanguages(languages) {
ava_1.default("load empty config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const logger = logging_1.getRunnerLogger(true);
const languages = 'javascript,python';
const languages = "javascript,python";
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function () {
async resolveQueries() {
return {
byLanguage: {},
noDeclaredLanguage: {},
@@ -64,15 +66,15 @@ ava_1.default("load empty config", async (t) => {
};
},
});
const config = await configUtils.initConfig(languages, undefined, undefined, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logger);
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logger));
const config = await configUtils.initConfig(languages, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logger);
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logger));
});
});
ava_1.default("loading config saves config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const logger = logging_1.getRunnerLogger(true);
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function () {
async resolveQueries() {
return {
byLanguage: {},
noDeclaredLanguage: {},
@@ -84,7 +86,7 @@ ava_1.default("loading config saves config", async (t) => {
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// Sanity check that getConfig returns undefined before we have called initConfig
t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined);
const config1 = await configUtils.initConfig('javascript,python', undefined, undefined, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logger);
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logger);
// The saved config file should now exist
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// And that same newly-initialised config should now be returned by getConfig
@@ -95,50 +97,50 @@ ava_1.default("loading config saves config", async (t) => {
ava_1.default("load input outside of workspace", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
try {
await configUtils.initConfig(undefined, undefined, '../input', { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger(true));
throw new Error('initConfig did not throw error');
await configUtils.initConfig(undefined, undefined, "../input", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, "../input"))));
}
});
});
ava_1.default("load non-local input with invalid repo syntax", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
// no filename given, just a repo
const configFile = 'octo-org/codeql-config@main';
const configFile = "octo-org/codeql-config@main";
try {
await configUtils.initConfig(undefined, undefined, configFile, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger(true));
throw new Error('initConfig did not throw error');
await configUtils.initConfig(undefined, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileRepoFormatInvalidMessage('octo-org/codeql-config@main')));
t.deepEqual(err, new Error(configUtils.getConfigFileRepoFormatInvalidMessage("octo-org/codeql-config@main")));
}
});
});
ava_1.default("load non-existent input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const languages = 'javascript';
const configFile = 'input';
const languages = "javascript";
const configFile = "input";
t.false(fs.existsSync(path.join(tmpDir, configFile)));
try {
await configUtils.initConfig(languages, undefined, configFile, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger(true));
throw new Error('initConfig did not throw error');
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, "input"))));
}
});
});
ava_1.default("load non-empty input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function () {
async resolveQueries() {
return {
byLanguage: {
'javascript': {
'/foo/a.ql': {},
'/bar/b.ql': {},
javascript: {
"/foo/a.ql": {},
"/bar/b.ql": {},
},
},
noDeclaredLanguage: {},
@@ -157,27 +159,27 @@ ava_1.default("load non-empty input", async (t) => {
- b
paths:
- c/d`;
fs.mkdirSync(path.join(tmpDir, 'foo'));
fs.mkdirSync(path.join(tmpDir, "foo"));
// And the config we expect it to parse to
const expectedConfig = {
languages: [languages_1.Language.javascript],
queries: { 'javascript': ['/foo/a.ql', '/bar/b.ql'] },
pathsIgnore: ['a', 'b'],
paths: ['c/d'],
queries: { javascript: ["/foo/a.ql", "/bar/b.ql"] },
pathsIgnore: ["a", "b"],
paths: ["c/d"],
originalUserInput: {
name: 'my config',
'disable-default-queries': true,
queries: [{ uses: './foo' }],
'paths-ignore': ['a', 'b'],
paths: ['c/d'],
name: "my config",
"disable-default-queries": true,
queries: [{ uses: "./foo" }],
"paths-ignore": ["a", "b"],
paths: ["c/d"],
},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: codeQL.getPath(),
};
const languages = 'javascript';
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
const actualConfig = await configUtils.initConfig(languages, undefined, configFilePath, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger(true));
const actualConfig = await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
@@ -191,12 +193,12 @@ ava_1.default("Default queries are used", async (t) => {
// with the correct arguments.
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function (queries, extraSearchPath) {
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return {
byLanguage: {
'javascript': {
'foo.ql': {},
javascript: {
"foo.ql": {},
},
},
noDeclaredLanguage: {},
@@ -210,13 +212,15 @@ ava_1.default("Default queries are used", async (t) => {
const inputFileContents = `
paths:
- foo`;
fs.mkdirSync(path.join(tmpDir, 'foo'));
const languages = 'javascript';
fs.mkdirSync(path.join(tmpDir, "foo"));
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
await configUtils.initConfig(languages, undefined, configFilePath, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger(true));
await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolve queries was called correctly
t.deepEqual(resolveQueriesArgs.length, 1);
t.deepEqual(resolveQueriesArgs[0].queries, ['javascript-code-scanning.qls']);
t.deepEqual(resolveQueriesArgs[0].queries, [
"javascript-code-scanning.qls",
]);
t.deepEqual(resolveQueriesArgs[0].extraSearchPath, undefined);
});
});
@@ -227,10 +231,12 @@ ava_1.default("Default queries are used", async (t) => {
*/
function queriesToResolvedQueryForm(queries) {
const dummyResolvedQueries = {};
queries.forEach(q => { dummyResolvedQueries[q] = {}; });
queries.forEach((q) => {
dummyResolvedQueries[q] = {};
});
return {
byLanguage: {
'javascript': dummyResolvedQueries,
javascript: dummyResolvedQueries,
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
@@ -243,16 +249,16 @@ ava_1.default("Queries can be specified in config file", async (t) => {
queries:
- uses: ./foo`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
fs.mkdirSync(path.join(tmpDir, 'foo'));
fs.mkdirSync(path.join(tmpDir, "foo"));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function (queries, extraSearchPath) {
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = 'javascript';
const config = await configUtils.initConfig(languages, undefined, configFilePath, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger(true));
const languages = "javascript";
const config = await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries
// and once for `./foo` from the config file.
@@ -260,9 +266,9 @@ ava_1.default("Queries can be specified in config file", async (t) => {
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.regex(resolveQueriesArgs[1].queries[0], /.*\/foo$/);
// Now check that the end result contains the default queries and the query from config
t.deepEqual(config.queries['javascript'].length, 2);
t.regex(config.queries['javascript'][0], /javascript-code-scanning.qls$/);
t.regex(config.queries['javascript'][1], /.*\/foo$/);
t.deepEqual(config.queries["javascript"].length, 2);
t.regex(config.queries["javascript"][0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"][1], /.*\/foo$/);
});
});
ava_1.default("Queries from config file can be overridden in workflow file", async (t) => {
@@ -273,18 +279,18 @@ ava_1.default("Queries from config file can be overridden in workflow file", asy
- uses: ./foo`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
// This config item should take precedence over the config file but shouldn't affect the default queries.
const queries = './override';
fs.mkdirSync(path.join(tmpDir, 'foo'));
fs.mkdirSync(path.join(tmpDir, 'override'));
const queries = "./override";
fs.mkdirSync(path.join(tmpDir, "foo"));
fs.mkdirSync(path.join(tmpDir, "override"));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function (queries, extraSearchPath) {
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = 'javascript';
const config = await configUtils.initConfig(languages, queries, configFilePath, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger(true));
const languages = "javascript";
const config = await configUtils.initConfig(languages, queries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries and once for `./override`,
// but won't be called for './foo' from the config file.
@@ -292,30 +298,30 @@ ava_1.default("Queries from config file can be overridden in workflow file", asy
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override$/);
// Now check that the end result contains only the default queries and the override query
t.deepEqual(config.queries['javascript'].length, 2);
t.regex(config.queries['javascript'][0], /javascript-code-scanning.qls$/);
t.regex(config.queries['javascript'][1], /.*\/override$/);
t.deepEqual(config.queries["javascript"].length, 2);
t.regex(config.queries["javascript"][0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"][1], /.*\/override$/);
});
});
ava_1.default("Queries in workflow file can be used in tandem with the 'disable default queries' option", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
process.env["RUNNER_TEMP"] = tmpDir;
process.env["GITHUB_WORKSPACE"] = tmpDir;
const inputFileContents = `
name: my config
disable-default-queries: true`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
const queries = './workflow-query';
fs.mkdirSync(path.join(tmpDir, 'workflow-query'));
const queries = "./workflow-query";
fs.mkdirSync(path.join(tmpDir, "workflow-query"));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function (queries, extraSearchPath) {
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = 'javascript';
const config = await configUtils.initConfig(languages, queries, configFilePath, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger(true));
const languages = "javascript";
const config = await configUtils.initConfig(languages, queries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for `./workflow-query`,
// but won't be called for the default one since that was disabled
@@ -323,24 +329,24 @@ ava_1.default("Queries in workflow file can be used in tandem with the 'disable
t.deepEqual(resolveQueriesArgs[0].queries.length, 1);
t.regex(resolveQueriesArgs[0].queries[0], /.*\/workflow-query$/);
// Now check that the end result contains only the workflow query, and not the default one
t.deepEqual(config.queries['javascript'].length, 1);
t.regex(config.queries['javascript'][0], /.*\/workflow-query$/);
t.deepEqual(config.queries["javascript"].length, 1);
t.regex(config.queries["javascript"][0], /.*\/workflow-query$/);
});
});
ava_1.default("Multiple queries can be specified in workflow file, no config file required", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
fs.mkdirSync(path.join(tmpDir, 'override1'));
fs.mkdirSync(path.join(tmpDir, 'override2'));
const queries = './override1,./override2';
fs.mkdirSync(path.join(tmpDir, "override1"));
fs.mkdirSync(path.join(tmpDir, "override2"));
const queries = "./override1,./override2";
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function (queries, extraSearchPath) {
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = 'javascript';
const config = await configUtils.initConfig(languages, queries, undefined, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger(true));
const languages = "javascript";
const config = await configUtils.initConfig(languages, queries, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly:
// It'll be called once for the default queries,
// and then once for each of the two queries from the workflow
@@ -350,35 +356,35 @@ ava_1.default("Multiple queries can be specified in workflow file, no config fil
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override1$/);
t.regex(resolveQueriesArgs[2].queries[0], /.*\/override2$/);
// Now check that the end result contains both the queries from the workflow, as well as the defaults
t.deepEqual(config.queries['javascript'].length, 3);
t.regex(config.queries['javascript'][0], /javascript-code-scanning.qls$/);
t.regex(config.queries['javascript'][1], /.*\/override1$/);
t.regex(config.queries['javascript'][2], /.*\/override2$/);
t.deepEqual(config.queries["javascript"].length, 3);
t.regex(config.queries["javascript"][0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"][1], /.*\/override1$/);
t.regex(config.queries["javascript"][2], /.*\/override2$/);
});
});
ava_1.default("Queries in workflow file can be added to the set of queries without overriding config file", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
process.env["RUNNER_TEMP"] = tmpDir;
process.env["GITHUB_WORKSPACE"] = tmpDir;
const inputFileContents = `
name: my config
queries:
- uses: ./foo`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
// These queries shouldn't override anything, because the value is prefixed with "+"
const queries = '+./additional1,./additional2';
fs.mkdirSync(path.join(tmpDir, 'foo'));
fs.mkdirSync(path.join(tmpDir, 'additional1'));
fs.mkdirSync(path.join(tmpDir, 'additional2'));
const queries = "+./additional1,./additional2";
fs.mkdirSync(path.join(tmpDir, "foo"));
fs.mkdirSync(path.join(tmpDir, "additional1"));
fs.mkdirSync(path.join(tmpDir, "additional2"));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function (queries, extraSearchPath) {
async resolveQueries(queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return queriesToResolvedQueryForm(queries);
},
});
const languages = 'javascript';
const config = await configUtils.initConfig(languages, queries, configFilePath, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger(true));
const languages = "javascript";
const config = await configUtils.initConfig(languages, queries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries,
// once for each of additional1 and additional2,
@@ -391,24 +397,24 @@ ava_1.default("Queries in workflow file can be added to the set of queries witho
t.deepEqual(resolveQueriesArgs[3].queries.length, 1);
t.regex(resolveQueriesArgs[3].queries[0], /.*\/foo$/);
// Now check that the end result contains all the queries
t.deepEqual(config.queries['javascript'].length, 4);
t.regex(config.queries['javascript'][0], /javascript-code-scanning.qls$/);
t.regex(config.queries['javascript'][1], /.*\/additional1$/);
t.regex(config.queries['javascript'][2], /.*\/additional2$/);
t.regex(config.queries['javascript'][3], /.*\/foo$/);
t.deepEqual(config.queries["javascript"].length, 4);
t.regex(config.queries["javascript"][0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"][1], /.*\/additional1$/);
t.regex(config.queries["javascript"][2], /.*\/additional2$/);
t.regex(config.queries["javascript"][3], /.*\/foo$/);
});
});
ava_1.default("Invalid queries in workflow file handled correctly", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const queries = 'foo/bar@v1@v3';
const languages = 'javascript';
const queries = "foo/bar@v1@v3";
const languages = "javascript";
// This function just needs to be type-correct; it doesn't need to do anything,
// since we're deliberately passing in invalid data
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function (_queries, _extraSearchPath) {
async resolveQueries(_queries, _extraSearchPath) {
return {
byLanguage: {
'javascript': {},
javascript: {},
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
@@ -416,8 +422,8 @@ ava_1.default("Invalid queries in workflow file handled correctly", async (t) =>
},
});
try {
await configUtils.initConfig(languages, queries, undefined, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger(true));
t.fail('initConfig did not throw error');
await configUtils.initConfig(languages, queries, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
t.fail("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getQueryUsesInvalid(undefined, "foo/bar@v1@v3")));
@@ -427,11 +433,11 @@ ava_1.default("Invalid queries in workflow file handled correctly", async (t) =>
ava_1.default("API client used when reading remote config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function () {
async resolveQueries() {
return {
byLanguage: {
'javascript': {
'foo.ql': {},
javascript: {
"foo.ql": {},
},
},
noDeclaredLanguage: {},
@@ -456,10 +462,10 @@ ava_1.default("API client used when reading remote config", async (t) => {
};
const spyGetContents = mockGetContents(dummyResponse);
// Create checkout directory for remote queries repository
fs.mkdirSync(path.join(tmpDir, 'foo/bar/dev'), { recursive: true });
const configFile = 'octo-org/codeql-config/config.yaml@main';
const languages = 'javascript';
await configUtils.initConfig(languages, undefined, configFile, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger(true));
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
const configFile = "octo-org/codeql-config/config.yaml@main";
const languages = "javascript";
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
t.assert(spyGetContents.called);
});
});
@@ -467,10 +473,10 @@ ava_1.default("Remote config handles the case where a directory is provided", as
return await util.withTmpDir(async (tmpDir) => {
const dummyResponse = []; // directories are returned as arrays
mockGetContents(dummyResponse);
const repoReference = 'octo-org/codeql-config/config.yaml@main';
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(undefined, undefined, repoReference, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger(true));
throw new Error('initConfig did not throw error');
await configUtils.initConfig(undefined, undefined, repoReference, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDirectoryGivenMessage(repoReference)));
@@ -483,10 +489,10 @@ ava_1.default("Invalid format of remote config handled correctly", async (t) =>
// note no "content" property here
};
mockGetContents(dummyResponse);
const repoReference = 'octo-org/codeql-config/config.yaml@main';
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(undefined, undefined, repoReference, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger(true));
throw new Error('initConfig did not throw error');
await configUtils.initConfig(undefined, undefined, repoReference, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileFormatInvalidMessage(repoReference)));
@@ -497,8 +503,8 @@ ava_1.default("No detected languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
mockListLanguages([]);
try {
await configUtils.initConfig(undefined, undefined, undefined, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger(true));
throw new Error('initConfig did not throw error');
await configUtils.initConfig(undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getNoLanguagesError()));
@@ -507,21 +513,21 @@ ava_1.default("No detected languages", async (t) => {
});
ava_1.default("Unknown languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const languages = 'ruby,english';
const languages = "ruby,english";
try {
await configUtils.initConfig(languages, undefined, undefined, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger(true));
throw new Error('initConfig did not throw error');
await configUtils.initConfig(languages, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getUnknownLanguagesError(['ruby', 'english'])));
t.deepEqual(err, new Error(configUtils.getUnknownLanguagesError(["ruby", "english"])));
}
});
});
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
ava_1.default("load invalid input - " + testName, async (t) => {
ava_1.default(`load invalid input - ${testName}`, async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function () {
async resolveQueries() {
return {
byLanguage: {},
noDeclaredLanguage: {},
@@ -529,13 +535,13 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
};
},
});
const languages = 'javascript';
const configFile = 'input';
const languages = "javascript";
const configFile = "input";
const inputFile = path.join(tmpDir, configFile);
fs.writeFileSync(inputFile, inputFileContents, 'utf8');
fs.writeFileSync(inputFile, inputFileContents, "utf8");
try {
await configUtils.initConfig(languages, undefined, configFile, { owner: 'github', repo: 'example ' }, tmpDir, tmpDir, codeQL, tmpDir, 'token', 'https://github.example.com', logging_1.getRunnerLogger(true));
throw new Error('initConfig did not throw error');
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));
@@ -543,14 +549,14 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
});
});
}
doInvalidInputTest('name invalid type', `
doInvalidInputTest("name invalid type", `
name:
- foo: bar`, configUtils.getNameInvalid);
doInvalidInputTest('disable-default-queries invalid type', `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid);
doInvalidInputTest('queries invalid type', `queries: foo`, configUtils.getQueriesInvalid);
doInvalidInputTest('paths-ignore invalid type', `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid);
doInvalidInputTest('paths invalid type', `paths: 17`, configUtils.getPathsInvalid);
doInvalidInputTest('queries uses invalid type', `
doInvalidInputTest("disable-default-queries invalid type", `disable-default-queries: 42`, configUtils.getDisableDefaultQueriesInvalid);
doInvalidInputTest("queries invalid type", `queries: foo`, configUtils.getQueriesInvalid);
doInvalidInputTest("paths-ignore invalid type", `paths-ignore: bar`, configUtils.getPathsIgnoreInvalid);
doInvalidInputTest("paths invalid type", `paths: 17`, configUtils.getPathsInvalid);
doInvalidInputTest("queries uses invalid type", `
queries:
- uses:
- hello: world`, configUtils.getQueryUsesInvalid);
@@ -561,38 +567,33 @@ function doInvalidQueryUsesTest(input, expectedErrorMessageGenerator) {
name: my config
queries:
- name: foo
uses: ` + input;
doInvalidInputTest("queries uses \"" + input + "\"", inputFileContents, expectedErrorMessageGenerator);
uses: ${input}`;
doInvalidInputTest(`queries uses "${input}"`, inputFileContents, expectedErrorMessageGenerator);
}
// Various "uses" fields, and the errors they should produce
doInvalidQueryUsesTest("''", c => configUtils.getQueryUsesInvalid(c, undefined));
doInvalidQueryUsesTest("foo/bar", c => configUtils.getQueryUsesInvalid(c, "foo/bar"));
doInvalidQueryUsesTest("foo/bar@v1@v2", c => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
doInvalidQueryUsesTest("foo@master", c => configUtils.getQueryUsesInvalid(c, "foo@master"));
doInvalidQueryUsesTest("https://github.com/foo/bar@master", c => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
doInvalidQueryUsesTest("./foo", c => configUtils.getLocalPathDoesNotExist(c, "foo"));
doInvalidQueryUsesTest("./..", c => configUtils.getLocalPathOutsideOfRepository(c, ".."));
doInvalidQueryUsesTest("''", (c) => configUtils.getQueryUsesInvalid(c, undefined));
doInvalidQueryUsesTest("foo/bar", (c) => configUtils.getQueryUsesInvalid(c, "foo/bar"));
doInvalidQueryUsesTest("foo/bar@v1@v2", (c) => configUtils.getQueryUsesInvalid(c, "foo/bar@v1@v2"));
doInvalidQueryUsesTest("foo@master", (c) => configUtils.getQueryUsesInvalid(c, "foo@master"));
doInvalidQueryUsesTest("https://github.com/foo/bar@master", (c) => configUtils.getQueryUsesInvalid(c, "https://github.com/foo/bar@master"));
doInvalidQueryUsesTest("./foo", (c) => configUtils.getLocalPathDoesNotExist(c, "foo"));
doInvalidQueryUsesTest("./..", (c) => configUtils.getLocalPathOutsideOfRepository(c, ".."));
const validPaths = [
'foo',
'foo/',
'foo/**',
'foo/**/',
'foo/**/**',
'foo/**/bar/**/baz',
'**/',
'**/foo',
'/foo',
"foo",
"foo/",
"foo/**",
"foo/**/",
"foo/**/**",
"foo/**/bar/**/baz",
"**/",
"**/foo",
"/foo",
];
const invalidPaths = [
'a/***/b',
'a/**b',
'a/b**',
'**',
];
ava_1.default('path validations', t => {
const invalidPaths = ["a/***/b", "a/**b", "a/b**", "**"];
ava_1.default("path validations", (t) => {
// Dummy values to pass to validateAndSanitisePath
const propertyName = 'paths';
const configFile = './.github/codeql/config.yml';
const propertyName = "paths";
const configFile = "./.github/codeql/config.yml";
for (const path of validPaths) {
t.truthy(configUtils.validateAndSanitisePath(path, propertyName, configFile, logging_1.getRunnerLogger(true)));
}
@@ -600,13 +601,13 @@ ava_1.default('path validations', t => {
t.throws(() => configUtils.validateAndSanitisePath(path, propertyName, configFile, logging_1.getRunnerLogger(true)));
}
});
ava_1.default('path sanitisation', t => {
ava_1.default("path sanitisation", (t) => {
// Dummy values to pass to validateAndSanitisePath
const propertyName = 'paths';
const configFile = './.github/codeql/config.yml';
const propertyName = "paths";
const configFile = "./.github/codeql/config.yml";
// Valid paths are not modified
t.deepEqual(configUtils.validateAndSanitisePath('foo/bar', propertyName, configFile, logging_1.getRunnerLogger(true)), 'foo/bar');
t.deepEqual(configUtils.validateAndSanitisePath("foo/bar", propertyName, configFile, logging_1.getRunnerLogger(true)), "foo/bar");
// Trailing stars are stripped
t.deepEqual(configUtils.validateAndSanitisePath('foo/**', propertyName, configFile, logging_1.getRunnerLogger(true)), 'foo/');
t.deepEqual(configUtils.validateAndSanitisePath("foo/**", propertyName, configFile, logging_1.getRunnerLogger(true)), "foo/");
});
//# sourceMappingURL=config-utils.test.js.map
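For context on the path handling exercised by the "path validations" and "path sanitisation" tests above: a lone "**" component is allowed inside a path, "**" glued to other characters is rejected, and a trailing "/**" is reduced to a trailing "/". Below is a rough, self-contained sketch that reproduces only the cases asserted in these tests; the name sketchValidateAndSanitisePath is mine, and the real validateAndSanitisePath also takes the property name, config file path and a logger for its error messages.

// Rough sketch (not the action's implementation) of the glob handling the
// tests above assert: "**" must stand alone as a path component, a bare "**"
// on its own is rejected, and a trailing "/**" collapses to a trailing "/".
function sketchValidateAndSanitisePath(p) {
    for (const component of p.split("/")) {
        if (component.includes("**") && component !== "**") {
            throw new Error(`"**" must be its own path component: ${p}`);
        }
    }
    if (p === "**") {
        throw new Error(`"${p}" matches everything and is not a useful filter`);
    }
    // Strip the trailing stars so "foo/**" becomes "foo/".
    return p.endsWith("/**") ? p.slice(0, -2) : p;
}
console.log(sketchValidateAndSanitisePath("foo/bar")); // "foo/bar"
console.log(sketchValidateAndSanitisePath("foo/**")); // "foo/"
// sketchValidateAndSanitisePath("a/**b") and sketchValidateAndSanitisePath("**") both throw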

File diff suppressed because one or more lines are too long


@@ -14,19 +14,24 @@ const path = __importStar(require("path"));
* Check out repository at the given ref, and return the directory of the checkout.
*/
async function checkoutExternalRepository(repository, ref, githubUrl, tempDir, logger) {
logger.info('Checking out ' + repository);
logger.info(`Checking out ${repository}`);
const checkoutLocation = path.join(tempDir, repository, ref);
if (!checkoutLocation.startsWith(tempDir)) {
// this still permits locations that mess with sibling repositories in `tempDir`, but that is acceptable
throw new Error(`'${repository}@${ref}' is not a valid repository and reference.`);
}
if (!fs.existsSync(checkoutLocation)) {
const repoURL = githubUrl + '/' + repository;
await new toolrunnner.ToolRunner('git', ['clone', repoURL, checkoutLocation]).exec();
await new toolrunnner.ToolRunner('git', [
'--work-tree=' + checkoutLocation,
'--git-dir=' + checkoutLocation + '/.git',
'checkout', ref,
const repoURL = `${githubUrl}/${repository}`;
await new toolrunnner.ToolRunner("git", [
"clone",
repoURL,
checkoutLocation,
]).exec();
await new toolrunnner.ToolRunner("git", [
`--work-tree=${checkoutLocation}`,
`--git-dir=${checkoutLocation}/.git`,
"checkout",
ref,
]).exec();
}
return checkoutLocation;


@@ -1 +1 @@
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,0EAA4D;AAC5D,uCAAyB;AACzB,2CAA6B;AAI7B;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAC9C,UAAkB,EAClB,GAAW,EACX,SAAiB,EACjB,OAAe,EACf,MAAc;IAEd,MAAM,CAAC,IAAI,CAAC,eAAe,GAAG,UAAU,CAAC,CAAC;IAE1C,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;IAE7D,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QACzC,wGAAwG;QACxG,MAAM,IAAI,KAAK,CAAC,IAAI,UAAU,IAAI,GAAG,4CAA4C,CAAC,CAAC;KACpF;IAED,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,OAAO,GAAG,SAAS,GAAG,GAAG,GAAG,UAAU,CAAC;QAC7C,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC;QACrF,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,KAAK,EAAE;YACtC,cAAc,GAAG,gBAAgB;YACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;YACzC,UAAU,EAAE,GAAG;SAChB,CAAC,CAAC,IAAI,EAAE,CAAC;KACX;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AA3BD,gEA2BC"}
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,0EAA4D;AAC5D,uCAAyB;AACzB,2CAA6B;AAI7B;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAC9C,UAAkB,EAClB,GAAW,EACX,SAAiB,EACjB,OAAe,EACf,MAAc;IAEd,MAAM,CAAC,IAAI,CAAC,gBAAgB,UAAU,EAAE,CAAC,CAAC;IAE1C,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;IAE7D,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QACzC,wGAAwG;QACxG,MAAM,IAAI,KAAK,CACb,IAAI,UAAU,IAAI,GAAG,4CAA4C,CAClE,CAAC;KACH;IAED,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,OAAO,GAAG,GAAG,SAAS,IAAI,UAAU,EAAE,CAAC;QAC7C,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,KAAK,EAAE;YACtC,OAAO;YACP,OAAO;YACP,gBAAgB;SACjB,CAAC,CAAC,IAAI,EAAE,CAAC;QACV,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,KAAK,EAAE;YACtC,eAAe,gBAAgB,EAAE;YACjC,aAAa,gBAAgB,OAAO;YACpC,UAAU;YACV,GAAG;SACJ,CAAC,CAAC,IAAI,EAAE,CAAC;KACX;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAlCD,gEAkCC"}


@@ -26,62 +26,70 @@ ava_1.default("checkoutExternalQueries", async (t) => {
// - the first commit contains files 'a' and 'b'
// - the second commit contains only 'a'
// Place the repo in a subdir because we're going to checkout a copy in tmpDir
const testRepoBaseDir = path.join(tmpDir, 'test-repo-dir');
const repoName = 'some/repo';
const testRepoBaseDir = path.join(tmpDir, "test-repo-dir");
const repoName = "some/repo";
const repoPath = path.join(testRepoBaseDir, repoName);
const repoGitDir = path.join(repoPath, '.git');
const repoGitDir = path.join(repoPath, ".git");
// Run the given git command, and return the output.
// Passes --git-dir and --work-tree.
// Any stderr output is suppressed until the command fails.
const runGit = async function (command) {
let stdout = '';
let stderr = '';
command = [`--git-dir=${repoGitDir}`, `--work-tree=${repoPath}`, ...command];
console.log('Running: git ' + command.join(' '));
let stdout = "";
let stderr = "";
command = [
`--git-dir=${repoGitDir}`,
`--work-tree=${repoPath}`,
...command,
];
console.log(`Running: git ${command.join(" ")}`);
try {
await new toolrunnner.ToolRunner('git', command, {
await new toolrunnner.ToolRunner("git", command, {
silent: true,
listeners: {
stdout: (data) => { stdout += data.toString(); },
stderr: (data) => { stderr += data.toString(); },
}
stdout: (data) => {
stdout += data.toString();
},
stderr: (data) => {
stderr += data.toString();
},
},
}).exec();
}
catch (e) {
console.log('Command failed: git ' + command.join(' '));
console.log(`Command failed: git ${command.join(" ")}`);
process.stderr.write(stderr);
throw e;
}
return stdout.trim();
};
fs.mkdirSync(repoPath, { recursive: true });
await runGit(['init', repoPath]);
await runGit(['config', 'user.email', 'test@github.com']);
await runGit(['config', 'user.name', 'Test Test']);
fs.writeFileSync(path.join(repoPath, 'a'), 'a content');
await runGit(['add', 'a']);
await runGit(['commit', '-m', 'commit1']);
fs.writeFileSync(path.join(repoPath, 'b'), 'b content');
await runGit(['add', 'b']);
await runGit(['commit', '-m', 'commit1']);
const commit1Sha = await runGit(['rev-parse', 'HEAD']);
fs.unlinkSync(path.join(repoPath, 'b'));
await runGit(['add', 'b']);
await runGit(['commit', '-m', 'commit2']);
const commit2Sha = await runGit(['rev-parse', 'HEAD']);
await runGit(["init", repoPath]);
await runGit(["config", "user.email", "test@github.com"]);
await runGit(["config", "user.name", "Test Test"]);
fs.writeFileSync(path.join(repoPath, "a"), "a content");
await runGit(["add", "a"]);
await runGit(["commit", "-m", "commit1"]);
fs.writeFileSync(path.join(repoPath, "b"), "b content");
await runGit(["add", "b"]);
await runGit(["commit", "-m", "commit1"]);
const commit1Sha = await runGit(["rev-parse", "HEAD"]);
fs.unlinkSync(path.join(repoPath, "b"));
await runGit(["add", "b"]);
await runGit(["commit", "-m", "commit2"]);
const commit2Sha = await runGit(["rev-parse", "HEAD"]);
// Checkout the first commit, which should contain 'a' and 'b'
t.false(fs.existsSync(path.join(tmpDir, repoName)));
await externalQueries.checkoutExternalRepository(repoName, commit1Sha, `file://${testRepoBaseDir}`, tmpDir, logging_1.getRunnerLogger(true));
t.true(fs.existsSync(path.join(tmpDir, repoName)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, 'a')));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, 'b')));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "a")));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "b")));
// Checkout the second commit as well, which should only contain 'a'
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
await externalQueries.checkoutExternalRepository(repoName, commit2Sha, `file://${testRepoBaseDir}`, tmpDir, logging_1.getRunnerLogger(true));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, 'a')));
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, 'b')));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "a")));
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "b")));
});
});
//# sourceMappingURL=external-queries.test.js.map


@@ -1 +1 @@
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,0EAA4D;AAC5D,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,oEAAsD;AACtD,uCAA4C;AAC5C,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACxC,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACnC,kDAAkD;QAClD,mFAAmF;QACnF,gDAAgD;QAChD,wCAAwC;QACxC,8EAA8E;QAC9E,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;QAC3D,MAAM,QAAQ,GAAG,WAAW,CAAC;QAC7B,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;QACtD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAE/C,oDAAoD;QACpD,oCAAoC;QACpC,2DAA2D;QAC3D,MAAM,MAAM,GAAG,KAAK,WAAU,OAAiB;YAC7C,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,OAAO,GAAG,CAAC,aAAa,UAAU,EAAE,EAAE,eAAe,QAAQ,EAAE,EAAE,GAAG,OAAO,CAAC,CAAC;YAC7E,OAAO,CAAC,GAAG,CAAC,eAAe,GAAG,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;YACjD,IAAI;gBACF,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,KAAK,EAAE,OAAO,EAAE;oBAC/C,MAAM,EAAE,IAAI;oBACZ,SAAS,EAAE;wBACT,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE,GAAG,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC;wBAChD,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE,GAAG,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC;qBACjD;iBACF,CAAC,CAAC,IAAI,EAAE,CAAC;aACX;YAAC,OAAO,CAAC,EAAE;gBACV,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;gBACxD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBAC7B,MAAM,CAAC,CAAC;aACT;YACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;QACvB,CAAC,CAAC;QAEF,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC;QACjC,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,YAAY,EAAE,iBAAiB,CAAC,CAAC,CAAC;QAC1D,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC;QAEnD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAE1C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;QACxC,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAIvD,8DAA8D;QAC9D,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACpD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,UAAU,eAAe,EAAE,EAC3B,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CAAC,CAAC;QACzB,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QAIpE,oEAAoE;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAChE,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,UAAU,eAAe,EAAE,EAC3B,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CAAC,CAAC;QACzB,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAA
U,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;IACvE,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,0EAA4D;AAC5D,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,oEAAsD;AACtD,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,kDAAkD;QAClD,mFAAmF;QACnF,gDAAgD;QAChD,wCAAwC;QACxC,8EAA8E;QAC9E,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;QAC3D,MAAM,QAAQ,GAAG,WAAW,CAAC;QAC7B,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;QACtD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAE/C,oDAAoD;QACpD,oCAAoC;QACpC,2DAA2D;QAC3D,MAAM,MAAM,GAAG,KAAK,WAAW,OAAiB;YAC9C,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,OAAO,GAAG;gBACR,aAAa,UAAU,EAAE;gBACzB,eAAe,QAAQ,EAAE;gBACzB,GAAG,OAAO;aACX,CAAC;YACF,OAAO,CAAC,GAAG,CAAC,gBAAgB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;YACjD,IAAI;gBACF,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,KAAK,EAAE,OAAO,EAAE;oBAC/C,MAAM,EAAE,IAAI;oBACZ,SAAS,EAAE;wBACT,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;wBACD,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;qBACF;iBACF,CAAC,CAAC,IAAI,EAAE,CAAC;aACX;YAAC,OAAO,CAAC,EAAE;gBACV,OAAO,CAAC,GAAG,CAAC,uBAAuB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;gBACxD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBAC7B,MAAM,CAAC,CAAC;aACT;YACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;QACvB,CAAC,CAAC;QAEF,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC;QACjC,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,YAAY,EAAE,iBAAiB,CAAC,CAAC,CAAC;QAC1D,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC;QAEnD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAE1C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;QACxC,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,8DAA8D;QAC9D,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACpD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,UAAU,eAAe,EAAE,EAC3B,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QAEpE,oEAAoE;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAChE,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,UAAU,eAAe,EAAE,EAC3B,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAA
Q,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;IACvE,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

41
lib/fingerprints.js generated

@@ -12,10 +12,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const long_1 = __importDefault(require("long"));
const tab = '\t'.charCodeAt(0);
const space = ' '.charCodeAt(0);
const lf = '\n'.charCodeAt(0);
const cr = '\r'.charCodeAt(0);
const tab = "\t".charCodeAt(0);
const space = " ".charCodeAt(0);
const lf = "\n".charCodeAt(0);
const cr = "\r".charCodeAt(0);
const BLOCK_SIZE = 100;
const MOD = long_1.default.fromInt(37); // L
// Compute the starting point for the hash mod
@@ -47,7 +47,7 @@ function hash(callback, input) {
const lineNumbers = Array(BLOCK_SIZE).fill(-1);
// The current hash value, updated as we read each character
let hash = long_1.default.ZERO;
let firstMod = computeFirstMod();
const firstMod = computeFirstMod();
// The current index in the window, will wrap around to zero when we reach BLOCK_SIZE
let index = 0;
// The line number of the character we are currently processing from the input
@@ -61,12 +61,12 @@ function hash(callback, input) {
const hashCounts = {};
// Output the current hash and line number to the callback function
const outputHash = function () {
let hashValue = hash.toUnsigned().toString(16);
const hashValue = hash.toUnsigned().toString(16);
if (!hashCounts[hashValue]) {
hashCounts[hashValue] = 0;
}
hashCounts[hashValue]++;
callback(lineNumbers[index], hashValue + ":" + hashCounts[hashValue]);
callback(lineNumbers[index], `${hashValue}:${hashCounts[hashValue]}`);
lineNumbers[index] = -1;
};
// Update the current hash value and increment the index in the window
@@ -145,10 +145,7 @@ function locationUpdateCallback(result, location, logger) {
result.partialFingerprints.primaryLocationLineHash = hash;
}
else if (existingFingerprint !== hash) {
logger.warning('Calculated fingerprint of ' + hash +
' for file ' + location.physicalLocation.artifactLocation.uri +
' line ' + lineNumber +
', but found existing inconsistent fingerprint value ' + existingFingerprint);
logger.warning(`Calculated fingerprint of ${hash} for file ${location.physicalLocation.artifactLocation.uri} line ${lineNumber}, but found existing inconsistent fingerprint value ${existingFingerprint}`);
}
};
}
@@ -159,40 +156,40 @@ function locationUpdateCallback(result, location, logger) {
function resolveUriToFile(location, artifacts, checkoutPath, logger) {
// This may be referencing an artifact
if (!location.uri && location.index !== undefined) {
if (typeof location.index !== 'number' ||
if (typeof location.index !== "number" ||
location.index < 0 ||
location.index >= artifacts.length ||
typeof artifacts[location.index].location !== 'object') {
typeof artifacts[location.index].location !== "object") {
logger.debug(`Ignoring location as URI "${location.index}" is invalid`);
return undefined;
}
location = artifacts[location.index].location;
}
// Get the URI and decode
if (typeof location.uri !== 'string') {
if (typeof location.uri !== "string") {
logger.debug(`Ignoring location as index "${location.uri}" is invalid`);
return undefined;
}
let uri = decodeURIComponent(location.uri);
// Remove a file scheme, and abort if the scheme is anything else
const fileUriPrefix = 'file://';
const fileUriPrefix = "file://";
if (uri.startsWith(fileUriPrefix)) {
uri = uri.substring(fileUriPrefix.length);
}
if (uri.indexOf('://') !== -1) {
if (uri.indexOf("://") !== -1) {
logger.debug(`Ignoring location URI "${uri}" as the scheme is not recognised`);
return undefined;
}
// Discard any absolute paths that aren't in the src root
const srcRootPrefix = checkoutPath + '/';
if (uri.startsWith('/') && !uri.startsWith(srcRootPrefix)) {
const srcRootPrefix = `${checkoutPath}/`;
if (uri.startsWith("/") && !uri.startsWith(srcRootPrefix)) {
logger.debug(`Ignoring location URI "${uri}" as it is outside of the src root`);
return undefined;
}
// Just assume a relative path is relative to the src root.
// This is not necessarily true but should be a good approximation
// and here we likely want to err on the side of handling more cases.
if (!uri.startsWith('/')) {
if (!uri.startsWith("/")) {
uri = srcRootPrefix + uri;
}
// Check the file exists
@@ -207,13 +204,13 @@ exports.resolveUriToFile = resolveUriToFile;
// and return an updated sarif file contents.
function addFingerprints(sarifContents, checkoutPath, logger) {
var _a, _b;
let sarif = JSON.parse(sarifContents);
const sarif = JSON.parse(sarifContents);
// Gather together results for the same file and construct
// callbacks to accept hashes for that file and update the location
const callbacksByFile = {};
for (const run of sarif.runs || []) {
// We may need the list of artifacts to resolve against
let artifacts = run.artifacts || [];
const artifacts = run.artifacts || [];
for (const result of run.results || []) {
// Check the primary location is defined correctly and is in the src root
const primaryLocation = (result.locations || [])[0];
@@ -235,7 +232,7 @@ function addFingerprints(sarifContents, checkoutPath, logger) {
Object.entries(callbacksByFile).forEach(([filepath, callbacks]) => {
// A callback that forwards the hash to all other callbacks for that file
const teeCallback = function (lineNumber, hash) {
Object.values(callbacks).forEach(c => c(lineNumber, hash));
Object.values(callbacks).forEach((c) => c(lineNumber, hash));
};
const fileContents = fs.readFileSync(filepath).toString();
hash(teeCallback, fileContents);
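A note on the fingerprint strings produced here: the outputHash helper earlier in this file emits the rolling block hash rendered in hex, followed by a colon and an occurrence counter, so identical code blocks within one file still receive distinct fingerprints (the fingerprints tests below expect values such as "c129715d7a2bc9a3:1"). A minimal sketch of just that counting step, with a helper name of my own choosing:

// Minimal illustration (not part of this commit) of the "<hash>:<count>"
// uniquifying scheme used by outputHash: repeated hash values receive an
// incrementing occurrence counter.
const hashCounts = {};
function uniquifyFingerprint(hashValue) {
    hashCounts[hashValue] = (hashCounts[hashValue] || 0) + 1;
    return `${hashValue}:${hashCounts[hashValue]}`;
}
console.log(uniquifyFingerprint("c129715d7a2bc9a3")); // "c129715d7a2bc9a3:1"
console.log(uniquifyFingerprint("c129715d7a2bc9a3")); // "c129715d7a2bc9a3:2"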

File diff suppressed because one or more lines are too long


@@ -19,7 +19,7 @@ const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
function testHash(t, input, expectedHashes) {
let index = 0;
let callback = function (lineNumber, hash) {
const callback = function (lineNumber, hash) {
t.is(lineNumber, index + 1);
t.is(hash, expectedHashes[index]);
index++;
@@ -27,7 +27,7 @@ function testHash(t, input, expectedHashes) {
fingerprints.hash(callback, input);
t.is(index, input.split(/\r\n|\r|\n/).length);
}
ava_1.default('hash', (t) => {
ava_1.default("hash", (t) => {
// Try empty file
testHash(t, "", ["c129715d7a2bc9a3:1"]);
// Try various combinations of newline characters
@@ -35,7 +35,7 @@ ava_1.default('hash', (t) => {
"271789c17abda88f:1",
"54703d4cd895b18:1",
"180aee12dab6264:1",
"a23a3dc5e078b07b:1"
"a23a3dc5e078b07b:1",
]);
testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End", [
"8b7cf3e952e7aeb2:1",
@@ -93,7 +93,7 @@ ava_1.default('hash', (t) => {
"a9cf91f7bbf1862b:1",
"55ec222b86bcae53:1",
"cc97dc7b1d7d8f7b:1",
"c129715d7a2bc9a3:1"
"c129715d7a2bc9a3:1",
]);
testHash(t, "x = 2\nx = 1\nprint(x)\nx = 3\nprint(x)\nx = 4\nprint(x)\n", [
"e54938cc54b302f1:1",
@@ -103,67 +103,75 @@ ava_1.default('hash', (t) => {
"54517377da7028d2:1",
"2c644846cb18d53e:1",
"f1b89f20de0d133:1",
"c129715d7a2bc9a3:1"
"c129715d7a2bc9a3:1",
]);
});
function testResolveUriToFile(uri, index, artifactsURIs) {
const location = { "uri": uri, "index": index };
const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
const location = { uri, index };
const artifacts = artifactsURIs.map((uri) => ({ location: { uri } }));
return fingerprints.resolveUriToFile(location, artifacts, process.cwd(), logging_1.getRunnerLogger(true));
}
ava_1.default('resolveUriToFile', t => {
ava_1.default("resolveUriToFile", (t) => {
// The resolveUriToFile method checks that the file exists and is in the right directory
// so we need to give it real files to look at. We will use this file as an example.
// For this to work we require the current working directory to be a parent, but this
// should generally always be the case so this is fine.
const cwd = process.cwd();
const filepath = __filename;
t.true(filepath.startsWith(cwd + '/'));
t.true(filepath.startsWith(`${cwd}/`));
const relativeFilepaht = filepath.substring(cwd.length + 1);
// Absolute paths are unmodified
t.is(testResolveUriToFile(filepath, undefined, []), filepath);
t.is(testResolveUriToFile('file://' + filepath, undefined, []), filepath);
t.is(testResolveUriToFile(`file://${filepath}`, undefined, []), filepath);
// Relative paths are made absolute
t.is(testResolveUriToFile(relativeFilepaht, undefined, []), filepath);
t.is(testResolveUriToFile('file://' + relativeFilepaht, undefined, []), filepath);
t.is(testResolveUriToFile(`file://${relativeFilepaht}`, undefined, []), filepath);
// Absolute paths outside the src root are discarded
t.is(testResolveUriToFile('/src/foo/bar.js', undefined, []), undefined);
t.is(testResolveUriToFile('file:///src/foo/bar.js', undefined, []), undefined);
t.is(testResolveUriToFile("/src/foo/bar.js", undefined, []), undefined);
t.is(testResolveUriToFile("file:///src/foo/bar.js", undefined, []), undefined);
// Other schemes are discarded
t.is(testResolveUriToFile('https://' + filepath, undefined, []), undefined);
t.is(testResolveUriToFile('ftp://' + filepath, undefined, []), undefined);
t.is(testResolveUriToFile(`https://${filepath}`, undefined, []), undefined);
t.is(testResolveUriToFile(`ftp://${filepath}`, undefined, []), undefined);
// Invalid URIs are discarded
t.is(testResolveUriToFile(1, undefined, []), undefined);
t.is(testResolveUriToFile(undefined, undefined, []), undefined);
// Non-existent files are discarded
t.is(testResolveUriToFile(filepath + '2', undefined, []), undefined);
t.is(testResolveUriToFile(`${filepath}2`, undefined, []), undefined);
// Index is resolved
t.is(testResolveUriToFile(undefined, 0, [filepath]), filepath);
t.is(testResolveUriToFile(undefined, 1, ['foo', filepath]), filepath);
t.is(testResolveUriToFile(undefined, 1, ["foo", filepath]), filepath);
// Invalid indexes are discarded
t.is(testResolveUriToFile(undefined, 1, [filepath]), undefined);
t.is(testResolveUriToFile(undefined, '0', [filepath]), undefined);
t.is(testResolveUriToFile(undefined, "0", [filepath]), undefined);
});
ava_1.default('addFingerprints', t => {
ava_1.default("addFingerprints", (t) => {
// Run an end-to-end test on a test file
let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.input.sarif').toString();
let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.expected.sarif').toString();
let input = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`)
.toString();
let expected = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.expected.sarif`)
.toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
const checkoutPath = path.normalize(__dirname + '/../src/testdata');
const checkoutPath = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, logging_1.getRunnerLogger(true)), expected);
});
ava_1.default('missingRegions', t => {
ava_1.default("missingRegions", (t) => {
// Run an end-to-end test on a test file
let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.input.sarif').toString();
let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.expected.sarif').toString();
let input = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`)
.toString();
let expected = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.expected.sarif`)
.toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
const checkoutPath = path.normalize(__dirname + '/../src/testdata');
const checkoutPath = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, logging_1.getRunnerLogger(true)), expected);
});
//# sourceMappingURL=fingerprints.test.js.map

File diff suppressed because one or more lines are too long

50
lib/init-action.js generated

@@ -13,21 +13,25 @@ const logging_1 = require("./logging");
const repository_1 = require("./repository");
const util = __importStar(require("./util"));
async function sendSuccessStatusReport(startedAt, config) {
const statusReportBase = await util.createStatusReportBase('init', 'success', startedAt);
const languages = config.languages.join(',');
const workflowLanguages = core.getInput('languages', { required: false });
const paths = (config.originalUserInput.paths || []).join(',');
const pathsIgnore = (config.originalUserInput['paths-ignore'] || []).join(',');
const disableDefaultQueries = config.originalUserInput['disable-default-queries'] ? languages : '';
const queries = (config.originalUserInput.queries || []).map(q => q.uses).join(',');
const statusReportBase = await util.createStatusReportBase("init", "success", startedAt);
const languages = config.languages.join(",");
const workflowLanguages = core.getInput("languages", { required: false });
const paths = (config.originalUserInput.paths || []).join(",");
const pathsIgnore = (config.originalUserInput["paths-ignore"] || []).join(",");
const disableDefaultQueries = config.originalUserInput["disable-default-queries"]
? languages
: "";
const queries = (config.originalUserInput.queries || [])
.map((q) => q.uses)
.join(",");
const statusReport = {
...statusReportBase,
languages: languages,
languages,
workflow_languages: workflowLanguages,
paths: paths,
paths,
paths_ignore: pathsIgnore,
disable_default_queries: disableDefaultQueries,
queries: queries,
queries,
};
await util.sendStatusReport(statusReport);
}
@@ -38,46 +42,46 @@ async function run() {
let codeql;
try {
util.prepareLocalRunEnvironment();
if (!await util.sendStatusReport(await util.createStatusReportBase('init', 'starting', startedAt), true)) {
if (!(await util.sendStatusReport(await util.createStatusReportBase("init", "starting", startedAt), true))) {
return;
}
codeql = await init_1.initCodeQL(core.getInput('tools'), core.getInput('token'), util.getRequiredEnvParam('GITHUB_SERVER_URL'), util.getRequiredEnvParam('RUNNER_TEMP'), util.getRequiredEnvParam('RUNNER_TOOL_CACHE'), 'actions', logger);
config = await init_1.initConfig(core.getInput('languages'), core.getInput('queries'), core.getInput('config-file'), repository_1.parseRepositoryNwo(util.getRequiredEnvParam('GITHUB_REPOSITORY')), util.getRequiredEnvParam('RUNNER_TEMP'), util.getRequiredEnvParam('RUNNER_TOOL_CACHE'), codeql, util.getRequiredEnvParam('GITHUB_WORKSPACE'), core.getInput('token'), util.getRequiredEnvParam('GITHUB_SERVER_URL'), logger);
codeql = await init_1.initCodeQL(core.getInput("tools"), core.getInput("token"), util.getRequiredEnvParam("GITHUB_SERVER_URL"), util.getRequiredEnvParam("RUNNER_TEMP"), util.getRequiredEnvParam("RUNNER_TOOL_CACHE"), "actions", logger);
config = await init_1.initConfig(core.getInput("languages"), core.getInput("queries"), core.getInput("config-file"), repository_1.parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")), util.getRequiredEnvParam("RUNNER_TEMP"), util.getRequiredEnvParam("RUNNER_TOOL_CACHE"), codeql, util.getRequiredEnvParam("GITHUB_WORKSPACE"), core.getInput("token"), util.getRequiredEnvParam("GITHUB_SERVER_URL"), logger);
}
catch (e) {
core.setFailed(e.message);
console.log(e);
await util.sendStatusReport(await util.createStatusReportBase('init', 'aborted', startedAt, e.message));
await util.sendStatusReport(await util.createStatusReportBase("init", "aborted", startedAt, e.message));
return;
}
try {
// Forward Go flags
const goFlags = process.env['GOFLAGS'];
const goFlags = process.env["GOFLAGS"];
if (goFlags) {
core.exportVariable('GOFLAGS', goFlags);
core.exportVariable("GOFLAGS", goFlags);
core.warning("Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action.");
}
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
const codeqlRam = process.env['CODEQL_RAM'] || '6500';
core.exportVariable('CODEQL_RAM', codeqlRam);
const codeqlRam = process.env["CODEQL_RAM"] || "6500";
core.exportVariable("CODEQL_RAM", codeqlRam);
const tracerConfig = await init_1.runInit(codeql, config);
if (tracerConfig !== undefined) {
Object.entries(tracerConfig.env).forEach(([key, value]) => core.exportVariable(key, value));
if (process.platform === 'win32') {
await init_1.injectWindowsTracer('Runner.Worker.exe', undefined, config, codeql, tracerConfig);
if (process.platform === "win32") {
await init_1.injectWindowsTracer("Runner.Worker.exe", undefined, config, codeql, tracerConfig);
}
}
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await util.sendStatusReport(await util.createStatusReportBase('init', 'failure', startedAt, error.message, error.stack));
await util.sendStatusReport(await util.createStatusReportBase("init", "failure", startedAt, error.message, error.stack));
return;
}
await sendSuccessStatusReport(startedAt, config);
}
run().catch(e => {
core.setFailed("init action failed: " + e);
run().catch((e) => {
core.setFailed(`init action failed: ${e}`);
console.log(e);
});
//# sourceMappingURL=init-action.js.map


@@ -1 +1 @@
{"version":3,"file":"init-action.js","sourceRoot":"","sources":["../src/init-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,iCAA8E;AAC9E,uCAA6C;AAC7C,6CAAkD;AAClD,6CAA+B;AAkB/B,KAAK,UAAU,uBAAuB,CAAC,SAAe,EAAE,MAA0B;IAChF,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;IAEzF,MAAM,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC7C,MAAM,iBAAiB,GAAG,IAAI,CAAC,QAAQ,CAAC,WAAW,EAAE,EAAE,QAAQ,EAAE,KAAK,EAAE,CAAC,CAAC;IAC1E,MAAM,KAAK,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,KAAK,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/D,MAAM,WAAW,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/E,MAAM,qBAAqB,GAAG,MAAM,CAAC,iBAAiB,CAAC,yBAAyB,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC;IACnG,MAAM,OAAO,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,OAAO,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAEpF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,SAAS,EAAE,SAAS;QACpB,kBAAkB,EAAE,iBAAiB;QACrC,KAAK,EAAE,KAAK;QACZ,YAAY,EAAE,WAAW;QACzB,uBAAuB,EAAE,qBAAqB;QAC9C,OAAO,EAAE,OAAO;KACjB,CAAC;IAEF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,IAAI,MAA0B,CAAC;IAC/B,IAAI,MAAc,CAAC;IAEnB,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YACxG,OAAO;SACR;QAED,MAAM,GAAG,MAAM,iBAAU,CACvB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,EACvC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,SAAS,EACT,MAAM,CAAC,CAAC;QACV,MAAM,GAAG,MAAM,iBAAU,CACvB,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,EAC1B,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,EACxB,IAAI,CAAC,QAAQ,CAAC,aAAa,CAAC,EAC5B,+BAAkB,CAAC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACjE,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,EACvC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,MAAM,EACN,IAAI,CAAC,mBAAmB,CAAC,kBAAkB,CAAC,EAC5C,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,MAAM,CAAC,CAAC;KAEX;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;QAC1B,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACf,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;QACxG,OAAO;KACR;IAED,IAAI;QAEF,mBAAmB;QACnB,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;QACvC,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;YACxC,IAAI,CAAC,OAAO,CAAC,6GAA6G,CAAC,CAAC;SAC7H;QAED,mGAAmG;QACnG,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,MAAM,CAAC;QACtD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE,SAAS,CAAC,CAAC;QAE7C,MAAM,YAAY,GAAG,MAAM,cAAO,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACnD,IAAI,YAAY,KAAK,SAAS,EAAE;YAC9B,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,cAAc,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC;YAE5F,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;gBAChC,MAAM,0BAAmB,CAAC,mBAAmB,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,CAAC,CAAC;aACzF;SACF;KAEF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAC3D,MAAM,EACN,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;QAChB,OAAO;KACR;IACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;AACnD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,sBAAsB,GAAG,CAAC,CAAC,CAAC;IAC3C,OAAO,CAAC,G
AAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
{"version":3,"file":"init-action.js","sourceRoot":"","sources":["../src/init-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,iCAA8E;AAC9E,uCAA6C;AAC7C,6CAAkD;AAClD,6CAA+B;AAkB/B,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,MAA0B;IAE1B,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,MAAM,EACN,SAAS,EACT,SAAS,CACV,CAAC;IAEF,MAAM,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC7C,MAAM,iBAAiB,GAAG,IAAI,CAAC,QAAQ,CAAC,WAAW,EAAE,EAAE,QAAQ,EAAE,KAAK,EAAE,CAAC,CAAC;IAC1E,MAAM,KAAK,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,KAAK,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/D,MAAM,WAAW,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CACvE,GAAG,CACJ,CAAC;IACF,MAAM,qBAAqB,GAAG,MAAM,CAAC,iBAAiB,CACpD,yBAAyB,CAC1B;QACC,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,EAAE,CAAC;IACP,MAAM,OAAO,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,OAAO,IAAI,EAAE,CAAC;SACrD,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC;SAClB,IAAI,CAAC,GAAG,CAAC,CAAC;IAEb,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,SAAS;QACT,kBAAkB,EAAE,iBAAiB;QACrC,KAAK;QACL,YAAY,EAAE,WAAW;QACzB,uBAAuB,EAAE,qBAAqB;QAC9C,OAAO;KACR,CAAC;IAEF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,IAAI,MAA0B,CAAC;IAC/B,IAAI,MAAc,CAAC;IAEnB,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IACE,CAAC,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAC3B,MAAM,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,UAAU,EAAE,SAAS,CAAC,EAChE,IAAI,CACL,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,GAAG,MAAM,iBAAU,CACvB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,EACvC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,SAAS,EACT,MAAM,CACP,CAAC;QACF,MAAM,GAAG,MAAM,iBAAU,CACvB,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,EAC1B,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,EACxB,IAAI,CAAC,QAAQ,CAAC,aAAa,CAAC,EAC5B,+BAAkB,CAAC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACjE,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,EACvC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,MAAM,EACN,IAAI,CAAC,mBAAmB,CAAC,kBAAkB,CAAC,EAC5C,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,MAAM,CACP,CAAC;KACH;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;QAC1B,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACf,MAAM,IAAI,CAAC,gBAAgB,CACzB,MAAM,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,EAAE,CAAC,CAAC,OAAO,CAAC,CAC3E,CAAC;QACF,OAAO;KACR;IAED,IAAI;QACF,mBAAmB;QACnB,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;QACvC,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;YACxC,IAAI,CAAC,OAAO,CACV,6GAA6G,CAC9G,CAAC;SACH;QAED,mGAAmG;QACnG,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,MAAM,CAAC;QACtD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE,SAAS,CAAC,CAAC;QAE7C,MAAM,YAAY,GAAG,MAAM,cAAO,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACnD,IAAI,YAAY,KAAK,SAAS,EAAE;YAC9B,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CACxD,IAAI,CAAC,cAAc,CAAC,GAAG,EAAE,KAAK,CAAC,CAChC,CAAC;YAEF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;gBAChC,MAAM,0BAAmB,CACvB,mBAAmB,EACnB,SAAS,EACT,MAAM,EACN,MAAM,EACN,YAAY,CACb,CAAC;aACH;SACF;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,IAAI,CAAC,gBAAgB,CACzB,MAAM,IAAI,CAAC,sBAAsB,CAC/B,MAAM,EACN,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;IACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;AACnD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;IAChB,IAAI,CAAC,SAAS,CAAC,uBAAuB,CAAC,EAAE,CAAC,CAAC;IAC3C,OAAO,CAAC,GAAG,
CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

20
lib/init.js generated

@@ -16,7 +16,7 @@ const configUtils = __importStar(require("./config-utils"));
const tracer_config_1 = require("./tracer-config");
const util = __importStar(require("./util"));
async function initCodeQL(codeqlURL, githubAuth, githubUrl, tempDir, toolsDir, mode, logger) {
logger.startGroup('Setup CodeQL tools');
logger.startGroup("Setup CodeQL tools");
const codeql = await codeql_1.setupCodeQL(codeqlURL, githubAuth, githubUrl, tempDir, toolsDir, mode, logger);
await codeql.printVersion();
logger.endGroup();
@@ -24,7 +24,7 @@ async function initCodeQL(codeqlURL, githubAuth, githubUrl, tempDir, toolsDir, m
}
exports.initCodeQL = initCodeQL;
async function initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger) {
logger.startGroup('Load language configuration');
logger.startGroup("Load language configuration");
const config = await configUtils.initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger);
analysisPaths.printPathFiltersWarning(config, logger);
logger.endGroup();
@@ -35,7 +35,7 @@ async function runInit(codeql, config) {
const sourceRoot = path.resolve();
fs.mkdirSync(util.getCodeQLDatabasesDir(config.tempDir), { recursive: true });
// TODO: replace this code once CodeQL supports multi-language tracing
for (let language of config.languages) {
for (const language of config.languages) {
// Init language database
await codeql.databaseInit(util.getCodeQLDatabasePath(config.tempDir, language), language, sourceRoot);
}
@@ -110,13 +110,15 @@ async function injectWindowsTracer(processName, processLevel, config, codeql, tr
Invoke-Expression "&$tracer --inject=$id"`;
}
const injectTracerPath = path.join(config.tempDir, 'inject-tracer.ps1');
const injectTracerPath = path.join(config.tempDir, "inject-tracer.ps1");
fs.writeFileSync(injectTracerPath, script);
await new toolrunnner.ToolRunner('powershell', [
'-ExecutionPolicy', 'Bypass',
'-file', injectTracerPath,
path.resolve(path.dirname(codeql.getPath()), 'tools', 'win64', 'tracer.exe'),
], { env: { 'ODASA_TRACER_CONFIGURATION': tracerConfig.spec } }).exec();
await new toolrunnner.ToolRunner("powershell", [
"-ExecutionPolicy",
"Bypass",
"-file",
injectTracerPath,
path.resolve(path.dirname(codeql.getPath()), "tools", "win64", "tracer.exe"),
], { env: { ODASA_TRACER_CONFIGURATION: tracerConfig.spec } }).exec();
}
exports.injectWindowsTracer = injectWindowsTracer;
//# sourceMappingURL=init.js.map
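
Note on the injectWindowsTracer hunk above: the lint pass splits the PowerShell arguments one per array element and drops the now-unnecessary quotes around the ODASA_TRACER_CONFIGURATION key, but the invocation itself is unchanged. A minimal sketch of that invocation pattern, assuming the same ToolRunner from @actions/exec used above; the helper name, script path and spec value here are placeholders, not values from this commit:

const path = require("path");
const toolrunnner = require("@actions/exec/lib/toolrunner");

async function runInjectScript(tempDir, tracerSpec) {
  // Placeholder script path; the real script body is generated in the hunk above.
  const injectTracerPath = path.join(tempDir, "inject-tracer.ps1");
  await new toolrunnner.ToolRunner("powershell", [
    "-ExecutionPolicy",
    "Bypass",
    "-file",
    injectTracerPath,
  ], { env: { ODASA_TRACER_CONFIGURATION: tracerSpec } }).exec();
}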

View File

@@ -1 +1 @@
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;AAAA,0EAA4D;AAC5D,uCAAyB;AACzB,2CAA6B;AAE7B,gEAAkD;AAClD,qCAA+C;AAC/C,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAExB,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAAkB,EAClB,SAAiB,EACjB,OAAe,EACf,QAAgB,EAChB,IAAe,EACf,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,MAAM,GAAG,MAAM,oBAAW,CAC9B,SAAS,EACT,UAAU,EACV,SAAS,EACT,OAAO,EACP,QAAQ,EACR,IAAI,EACJ,MAAM,CAAC,CAAC;IACV,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,YAAoB,EACpB,UAAkB,EAClB,SAAiB,EACjB,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,YAAY,EACZ,UAAU,EACV,SAAS,EACT,MAAM,CAAC,CAAC;IACV,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA7BD,gCA6BC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B;IAE1B,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC;IAElC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE9E,sEAAsE;IACtE,KAAK,IAAI,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACrC,yBAAyB;QACzB,MAAM,MAAM,CAAC,YAAY,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC;KACvG;IAED,OAAO,MAAM,uCAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAfD,0BAeC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;gDAiBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,WAAW,CAAC,UAAU,CAC9B,YAAY,EACZ;QACE,kBAAkB,EAAE,QAAQ;QAC5B,OAAO,EAAE,gBAAgB;QACzB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,YAAY,CAAC;KAC7E,EACD,EAAE,GAAG,EAAE,EAAE,4BAA4B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,IAAI,EAAE,CAAC;AACzE,CAAC;AAhFD,kDAgFC"}
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;AAAA,0EAA4D;AAC5D,uCAAyB;AACzB,2CAA6B;AAE7B,gEAAkD;AAClD,qCAA+C;AAC/C,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAExB,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAAkB,EAClB,SAAiB,EACjB,OAAe,EACf,QAAgB,EAChB,IAAe,EACf,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,MAAM,GAAG,MAAM,oBAAW,CAC9B,SAAS,EACT,UAAU,EACV,SAAS,EACT,OAAO,EACP,QAAQ,EACR,IAAI,EACJ,MAAM,CACP,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AAtBD,gCAsBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,YAAoB,EACpB,UAAkB,EAClB,SAAiB,EACjB,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,YAAY,EACZ,UAAU,EACV,SAAS,EACT,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA9BD,gCA8BC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B;IAE1B,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC;IAElC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE9E,sEAAsE;IACtE,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,yBAAyB;QACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,EACR,UAAU,CACX,CAAC;KACH;IAED,OAAO,MAAM,uCAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAnBD,0BAmBC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;gDAiBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,WAAW,CAAC,UAAU,CAC9B,YAAY,EACZ;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AAxFD,kDAwFC"}

10
lib/languages.js generated
View File

@@ -12,10 +12,10 @@ var Language;
})(Language = exports.Language || (exports.Language = {}));
// Additional names for languages
const LANGUAGE_ALIASES = {
'c': Language.cpp,
'c++': Language.cpp,
'c#': Language.csharp,
'typescript': Language.javascript,
c: Language.cpp,
"c++": Language.cpp,
"c#": Language.csharp,
typescript: Language.javascript,
};
// Translate from user input or GitHub's API names for languages to CodeQL's names for languages
function parseLanguage(language) {
@@ -33,7 +33,7 @@ function parseLanguage(language) {
}
exports.parseLanguage = parseLanguage;
function isTracedLanguage(language) {
return ['cpp', 'java', 'csharp'].includes(language);
return ["cpp", "java", "csharp"].includes(language);
}
exports.isTracedLanguage = isTracedLanguage;
function isScannedLanguage(language) {

View File

@@ -1 +1 @@
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;AAAA,wCAAwC;AACxC,IAAY,QAOX;AAPD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;AACnB,CAAC,EAPW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAOnB;AAED,iCAAiC;AACjC,MAAM,gBAAgB,GAA+B;IACnD,GAAG,EAAE,QAAQ,CAAC,GAAG;IACjB,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,YAAY,EAAE,QAAQ,CAAC,UAAU;CAClC,CAAC;AAEF,gGAAgG;AAChG,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;IAElC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,yBAAyB;IACzB,IAAI,QAAQ,IAAI,gBAAgB,EAAE;QAChC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,CAAC;KACnC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAfD,sCAeC;AAGD,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACtD,CAAC;AAFD,4CAEC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;AAAA,wCAAwC;AACxC,IAAY,QAOX;AAPD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;AACnB,CAAC,EAPW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAOnB;AAED,iCAAiC;AACjC,MAAM,gBAAgB,GAAiC;IACrD,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,gGAAgG;AAChG,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;IAElC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,yBAAyB;IACzB,IAAI,QAAQ,IAAI,gBAAgB,EAAE;QAChC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,CAAC;KACnC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAfD,sCAeC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACtD,CAAC;AAFD,4CAEC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}

32
lib/languages.test.js generated
View File

@@ -7,25 +7,25 @@ const ava_1 = __importDefault(require("ava"));
const languages_1 = require("./languages");
const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
ava_1.default('parseLangauge', async (t) => {
ava_1.default("parseLangauge", async (t) => {
// Exact matches
t.deepEqual(languages_1.parseLanguage('csharp'), languages_1.Language.csharp);
t.deepEqual(languages_1.parseLanguage('cpp'), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage('go'), languages_1.Language.go);
t.deepEqual(languages_1.parseLanguage('java'), languages_1.Language.java);
t.deepEqual(languages_1.parseLanguage('javascript'), languages_1.Language.javascript);
t.deepEqual(languages_1.parseLanguage('python'), languages_1.Language.python);
t.deepEqual(languages_1.parseLanguage("csharp"), languages_1.Language.csharp);
t.deepEqual(languages_1.parseLanguage("cpp"), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage("go"), languages_1.Language.go);
t.deepEqual(languages_1.parseLanguage("java"), languages_1.Language.java);
t.deepEqual(languages_1.parseLanguage("javascript"), languages_1.Language.javascript);
t.deepEqual(languages_1.parseLanguage("python"), languages_1.Language.python);
// Aliases
t.deepEqual(languages_1.parseLanguage('c'), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage('c++'), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage('c#'), languages_1.Language.csharp);
t.deepEqual(languages_1.parseLanguage('typescript'), languages_1.Language.javascript);
t.deepEqual(languages_1.parseLanguage("c"), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage("c++"), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage("c#"), languages_1.Language.csharp);
t.deepEqual(languages_1.parseLanguage("typescript"), languages_1.Language.javascript);
// Not matches
t.deepEqual(languages_1.parseLanguage('foo'), undefined);
t.deepEqual(languages_1.parseLanguage(' '), undefined);
t.deepEqual(languages_1.parseLanguage(''), undefined);
t.deepEqual(languages_1.parseLanguage("foo"), undefined);
t.deepEqual(languages_1.parseLanguage(" "), undefined);
t.deepEqual(languages_1.parseLanguage(""), undefined);
});
ava_1.default('isTracedLanguage', async (t) => {
ava_1.default("isTracedLanguage", async (t) => {
t.true(languages_1.isTracedLanguage(languages_1.Language.cpp));
t.true(languages_1.isTracedLanguage(languages_1.Language.java));
t.true(languages_1.isTracedLanguage(languages_1.Language.csharp));
@@ -33,7 +33,7 @@ ava_1.default('isTracedLanguage', async (t) => {
t.false(languages_1.isTracedLanguage(languages_1.Language.javascript));
t.false(languages_1.isTracedLanguage(languages_1.Language.python));
});
ava_1.default('isScannedLanguage', async (t) => {
ava_1.default("isScannedLanguage", async (t) => {
t.false(languages_1.isScannedLanguage(languages_1.Language.cpp));
t.false(languages_1.isScannedLanguage(languages_1.Language.java));
t.false(languages_1.isScannedLanguage(languages_1.Language.csharp));

View File

@@ -1 +1 @@
{"version":3,"file":"languages.test.js","sourceRoot":"","sources":["../src/languages.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,2CAAyF;AACzF,mDAA2C;AAE3C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,gBAAgB;IAChB,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IACtD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAChD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,IAAI,CAAC,EAAE,oBAAQ,CAAC,EAAE,CAAC,CAAC;IAC9C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,MAAM,CAAC,EAAE,oBAAQ,CAAC,IAAI,CAAC,CAAC;IAClD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,YAAY,CAAC,EAAE,oBAAQ,CAAC,UAAU,CAAC,CAAC;IAC9D,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAEtD,UAAU;IACV,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,GAAG,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAC9C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAChD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,IAAI,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAClD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,YAAY,CAAC,EAAE,oBAAQ,CAAC,UAAU,CAAC,CAAC;IAE9D,cAAc;IACd,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,SAAS,CAAC,CAAC;IAC7C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,GAAG,CAAC,EAAE,SAAS,CAAC,CAAC;IAC3C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,EAAE,CAAC,EAAE,SAAS,CAAC,CAAC;AAC5C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACjC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IACxC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAE1C,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mBAAmB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAClC,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACzC,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAE5C,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7C,CAAC,CAAC,CAAC"}
{"version":3,"file":"languages.test.js","sourceRoot":"","sources":["../src/languages.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,2CAKqB;AACrB,mDAA6C;AAE7C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,gBAAgB;IAChB,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IACtD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAChD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,IAAI,CAAC,EAAE,oBAAQ,CAAC,EAAE,CAAC,CAAC;IAC9C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,MAAM,CAAC,EAAE,oBAAQ,CAAC,IAAI,CAAC,CAAC;IAClD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,YAAY,CAAC,EAAE,oBAAQ,CAAC,UAAU,CAAC,CAAC;IAC9D,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAEtD,UAAU;IACV,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,GAAG,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAC9C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAChD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,IAAI,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAClD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,YAAY,CAAC,EAAE,oBAAQ,CAAC,UAAU,CAAC,CAAC;IAE9D,cAAc;IACd,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,SAAS,CAAC,CAAC;IAC7C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,GAAG,CAAC,EAAE,SAAS,CAAC,CAAC;IAC3C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,EAAE,CAAC,EAAE,SAAS,CAAC,CAAC;AAC5C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IACxC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAE1C,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mBAAmB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACpC,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACzC,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAE5C,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7C,CAAC,CAAC,CAAC"}

2
lib/repository.js generated
View File

@@ -1,7 +1,7 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function parseRepositoryNwo(input) {
const parts = input.split('/');
const parts = input.split("/");
if (parts.length !== 2) {
throw new Error(`"${input}" is not a valid repository name`);
}
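
The parseRepositoryNwo helper above simply splits an owner/name pair on "/" and rejects anything else. A hypothetical usage sketch; the export and the shape of the returned value are not shown in this hunk, so the comments below are assumptions:

const { parseRepositoryNwo } = require("./repository");

const nwo = parseRepositoryNwo("github/codeql-action");
// Assumed result: an object naming the owner and the repository.

parseRepositoryNwo("not-a-repository");
// throws: '"not-a-repository" is not a valid repository name'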

154
lib/runner.js generated
View File

@@ -22,18 +22,18 @@ const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util_1 = require("./util");
const program = new commander_1.Command();
program.version('0.0.1');
program.version("0.0.1");
function parseGithubUrl(inputUrl) {
try {
const url = new URL(inputUrl);
// If we detect this is trying to be to github.com
// then return with a fixed canonical URL.
if (url.hostname === 'github.com' || url.hostname === 'api.github.com') {
return 'https://github.com';
if (url.hostname === "github.com" || url.hostname === "api.github.com") {
return "https://github.com";
}
// Remove the API prefix if it's present
if (url.pathname.indexOf('/api/v3') !== -1) {
url.pathname = url.pathname.substring(0, url.pathname.indexOf('/api/v3'));
if (url.pathname.indexOf("/api/v3") !== -1) {
url.pathname = url.pathname.substring(0, url.pathname.indexOf("/api/v3"));
}
return url.toString();
}
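
The parseGithubUrl hunk above canonicalises github.com and api.github.com URLs and strips a trailing /api/v3 API prefix from GitHub Enterprise URLs. A self-contained sketch of that behaviour; the function name and the example host are placeholders, not part of the runner:

function canonicalizeGithubUrl(inputUrl) {
  const url = new URL(inputUrl);
  if (url.hostname === "github.com" || url.hostname === "api.github.com") {
    return "https://github.com";
  }
  if (url.pathname.indexOf("/api/v3") !== -1) {
    url.pathname = url.pathname.substring(0, url.pathname.indexOf("/api/v3"));
  }
  return url.toString();
}

console.log(canonicalizeGithubUrl("https://api.github.com"));          // https://github.com
console.log(canonicalizeGithubUrl("https://ghe.example.com/api/v3"));  // https://ghe.example.com/
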
@@ -42,42 +42,42 @@ function parseGithubUrl(inputUrl) {
}
}
function getTempDir(userInput) {
const tempDir = path.join(userInput || process.cwd(), 'codeql-runner');
const tempDir = path.join(userInput || process.cwd(), "codeql-runner");
if (!fs.existsSync(tempDir)) {
fs.mkdirSync(tempDir, { recursive: true });
}
return tempDir;
}
function getToolsDir(userInput) {
const toolsDir = userInput || path.join(os.homedir(), 'codeql-runner-tools');
const toolsDir = userInput || path.join(os.homedir(), "codeql-runner-tools");
if (!fs.existsSync(toolsDir)) {
fs.mkdirSync(toolsDir, { recursive: true });
}
return toolsDir;
}
const codeqlEnvJsonFilename = 'codeql-env.json';
const codeqlEnvJsonFilename = "codeql-env.json";
// Imports the environment from codeqlEnvJsonFilename if not already present
function importTracerEnvironment(config) {
if (!('ODASA_TRACER_CONFIGURATION' in process.env)) {
if (!("ODASA_TRACER_CONFIGURATION" in process.env)) {
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
const env = JSON.parse(fs.readFileSync(jsonEnvFile).toString('utf-8'));
Object.keys(env).forEach(key => process.env[key] = env[key]);
const env = JSON.parse(fs.readFileSync(jsonEnvFile).toString("utf-8"));
Object.keys(env).forEach((key) => (process.env[key] = env[key]));
}
}
// Allow the user to specify refs in full refs/heads/branch format
// or just the short branch name and prepend "refs/heads/" to it.
function parseRef(userInput) {
if (userInput.startsWith('refs/')) {
if (userInput.startsWith("refs/")) {
return userInput;
}
else {
return 'refs/heads/' + userInput;
return `refs/heads/${userInput}`;
}
}
// Parses the --trace-process-name arg from process.argv, or returns undefined
function parseTraceProcessName() {
for (let i = 0; i < process.argv.length - 1; i++) {
if (process.argv[i] === '--trace-process-name') {
if (process.argv[i] === "--trace-process-name") {
return process.argv[i + 1];
}
}
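
Two small helpers in the hunk above are easy to skim past: importTracerEnvironment merges the saved codeql-env.json back into process.env, and parseRef prepends refs/heads/ to bare branch names. A sketch of the ref normalisation, assuming exactly the behaviour shown above (the function name is mine):

function normalizeRef(userInput) {
  return userInput.startsWith("refs/") ? userInput : `refs/heads/${userInput}`;
}

console.log(normalizeRef("main"));              // refs/heads/main
console.log(normalizeRef("refs/pull/42/head")); // unchanged: refs/pull/42/head
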
@@ -86,7 +86,7 @@ function parseTraceProcessName() {
// Parses the --trace-process-level arg from process.argv, or returns undefined
function parseTraceProcessLevel() {
for (let i = 0; i < process.argv.length - 1; i++) {
if (process.argv[i] === '--trace-process-level') {
if (process.argv[i] === "--trace-process-level") {
const v = parseInt(process.argv[i + 1], 10);
return isNaN(v) ? undefined : v;
}
@@ -94,19 +94,19 @@ function parseTraceProcessLevel() {
return undefined;
}
program
.command('init')
.description('Initializes CodeQL')
.requiredOption('--repository <repository>', 'Repository name. (Required)')
.requiredOption('--github-url <url>', 'URL of GitHub instance. (Required)')
.requiredOption('--github-auth <auth>', 'GitHub Apps token or personal access token. (Required)')
.option('--languages <languages>', 'Comma-separated list of languages to analyze. Otherwise detects and analyzes all supported languages from the repo.')
.option('--queries <queries>', 'Comma-separated list of additional queries to run. This overrides the same setting in a configuration file.')
.option('--config-file <file>', 'Path to config file.')
.option('--codeql-path <path>', 'Path to a copy of the CodeQL CLI executable to use. Otherwise downloads a copy.')
.option('--temp-dir <dir>', 'Directory to use for temporary files. Default is "./codeql-runner".')
.option('--tools-dir <dir>', 'Directory to use for CodeQL tools and other files to store between runs. Default is a subdirectory of the home directory.')
.option('--checkout-path <path>', 'Checkout path. Default is the current working directory.')
.option('--debug', 'Print more verbose output', false)
.command("init")
.description("Initializes CodeQL")
.requiredOption("--repository <repository>", "Repository name. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
.option("--languages <languages>", "Comma-separated list of languages to analyze. Otherwise detects and analyzes all supported languages from the repo.")
.option("--queries <queries>", "Comma-separated list of additional queries to run. This overrides the same setting in a configuration file.")
.option("--config-file <file>", "Path to config file.")
.option("--codeql-path <path>", "Path to a copy of the CodeQL CLI executable to use. Otherwise downloads a copy.")
.option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
.option("--tools-dir <dir>", "Directory to use for CodeQL tools and other files to store between runs. Default is a subdirectory of the home directory.")
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
.option("--debug", "Print more verbose output", false)
// This prevents a message like: error: unknown option '--trace-process-level'
// Remove this if commander.js starts supporting hidden options.
.allowUnknownOption()
@@ -124,29 +124,29 @@ program
codeql = codeql_1.getCodeQL(cmd.codeqlPath);
}
else {
codeql = await init_1.initCodeQL(undefined, cmd.githubAuth, parseGithubUrl(cmd.githubUrl), tempDir, toolsDir, 'runner', logger);
codeql = await init_1.initCodeQL(undefined, cmd.githubAuth, parseGithubUrl(cmd.githubUrl), tempDir, toolsDir, "runner", logger);
}
const config = await init_1.initConfig(cmd.languages, cmd.queries, cmd.configFile, repository_1.parseRepositoryNwo(cmd.repository), tempDir, toolsDir, codeql, cmd.checkoutPath || process.cwd(), cmd.githubAuth, parseGithubUrl(cmd.githubUrl), logger);
const tracerConfig = await init_1.runInit(codeql, config);
if (tracerConfig === undefined) {
return;
}
if (process.platform === 'win32') {
if (process.platform === "win32") {
await init_1.injectWindowsTracer(parseTraceProcessName(), parseTraceProcessLevel(), config, codeql, tracerConfig);
}
// Always output a json file of the env that can be consumed programatically
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
fs.writeFileSync(jsonEnvFile, JSON.stringify(tracerConfig.env));
if (process.platform === 'win32') {
const batEnvFile = path.join(config.tempDir, 'codeql-env.bat');
if (process.platform === "win32") {
const batEnvFile = path.join(config.tempDir, "codeql-env.bat");
const batEnvFileContents = Object.entries(tracerConfig.env)
.map(([key, value]) => `Set ${key}=${value}`)
.join('\n');
.join("\n");
fs.writeFileSync(batEnvFile, batEnvFileContents);
const powershellEnvFile = path.join(config.tempDir, 'codeql-env.sh');
const powershellEnvFile = path.join(config.tempDir, "codeql-env.sh");
const powershellEnvFileContents = Object.entries(tracerConfig.env)
.map(([key, value]) => `$env:${key}="${value}"`)
.join('\n');
.join("\n");
fs.writeFileSync(powershellEnvFile, powershellEnvFileContents);
logger.info(`\nCodeQL environment output to "${jsonEnvFile}", "${batEnvFile}" and "${powershellEnvFile}". ` +
`Please export these variables to future processes so the build can be traced. ` +
@@ -155,11 +155,11 @@ program
}
else {
// Assume that anything that's not windows is using a unix-style shell
const shEnvFile = path.join(config.tempDir, 'codeql-env.sh');
const shEnvFile = path.join(config.tempDir, "codeql-env.sh");
const shEnvFileContents = Object.entries(tracerConfig.env)
// Some vars contain ${LIB} that we do not want to be expanded when executing this script
.map(([key, value]) => `export ${key}="${value.replace('$', '\\$')}"`)
.join('\n');
.map(([key, value]) => `export ${key}="${value.replace("$", "\\$")}"`)
.join("\n");
fs.writeFileSync(shEnvFile, shEnvFileContents);
logger.info(`\nCodeQL environment output to "${jsonEnvFile}" and "${shEnvFile}". ` +
`Please export these variables to future processes so the build can be traced, ` +
@@ -167,17 +167,17 @@ program
}
}
catch (e) {
logger.error('Init failed');
logger.error("Init failed");
logger.error(e);
process.exitCode = 1;
}
});
program
.command('autobuild')
.description('Attempts to automatically build code')
.option('--language <language>', 'The language to build. Otherwise will detect the dominant compiled language.')
.option('--temp-dir <dir>', 'Directory to use for temporary files. Default is "./codeql-runner".')
.option('--debug', 'Print more verbose output', false)
.command("autobuild")
.description("Attempts to automatically build code")
.option("--language <language>", "The language to build. Otherwise will detect the dominant compiled language.")
.option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
.option("--debug", "Print more verbose output", false)
.action(async (cmd) => {
const logger = logging_1.getRunnerLogger(cmd.debug);
try {
@@ -192,7 +192,7 @@ program
language = languages_1.parseLanguage(cmd.language);
if (language === undefined || !config.languages.includes(language)) {
throw new Error(`"${cmd.language}" is not a recognised language. ` +
`Known languages in this project are ${config.languages.join(', ')}.`);
`Known languages in this project are ${config.languages.join(", ")}.`);
}
}
else {
@@ -203,64 +203,64 @@ program
}
}
catch (e) {
logger.error('Autobuild failed');
logger.error("Autobuild failed");
logger.error(e);
process.exitCode = 1;
}
});
program
.command('analyze')
.description('Finishes extracting code and runs CodeQL queries')
.requiredOption('--repository <repository>', 'Repository name. (Required)')
.requiredOption('--commit <commit>', 'SHA of commit that was analyzed. (Required)')
.requiredOption('--ref <ref>', 'Name of ref that was analyzed. (Required)')
.requiredOption('--github-url <url>', 'URL of GitHub instance. (Required)')
.requiredOption('--github-auth <auth>', 'GitHub Apps token or personal access token. (Required)')
.option('--checkout-path <path>', 'Checkout path. Default is the current working directory.')
.option('--no-upload', 'Do not upload results after analysis.')
.option('--output-dir <dir>', 'Directory to output SARIF files to. Default is in the temp directory.')
.option('--ram <ram>', 'Amount of memory to use when running queries. Default is to use all available memory.')
.option('--no-add-snippets', 'Specify whether to include code snippets in the sarif output.')
.option('--threads <threads>', 'Number of threads to use when running queries. ' +
'Default is to use all available cores.')
.option('--temp-dir <dir>', 'Directory to use for temporary files. Default is "./codeql-runner".')
.option('--debug', 'Print more verbose output', false)
.command("analyze")
.description("Finishes extracting code and runs CodeQL queries")
.requiredOption("--repository <repository>", "Repository name. (Required)")
.requiredOption("--commit <commit>", "SHA of commit that was analyzed. (Required)")
.requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
.option("--no-upload", "Do not upload results after analysis.")
.option("--output-dir <dir>", "Directory to output SARIF files to. Default is in the temp directory.")
.option("--ram <ram>", "Amount of memory to use when running queries. Default is to use all available memory.")
.option("--no-add-snippets", "Specify whether to include code snippets in the sarif output.")
.option("--threads <threads>", "Number of threads to use when running queries. " +
"Default is to use all available cores.")
.option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
.option("--debug", "Print more verbose output", false)
.action(async (cmd) => {
const logger = logging_1.getRunnerLogger(cmd.debug);
try {
const tempDir = getTempDir(cmd.tempDir);
const outputDir = cmd.outputDir || path.join(tempDir, 'codeql-sarif');
const outputDir = cmd.outputDir || path.join(tempDir, "codeql-sarif");
const config = await config_utils_1.getConfig(getTempDir(cmd.tempDir), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. " +
"Was the 'init' command run with the same '--temp-dir' argument as this command.");
}
await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, cmd.githubAuth, parseGithubUrl(cmd.githubUrl), cmd.upload, 'runner', outputDir, util_1.getMemoryFlag(cmd.ram), util_1.getAddSnippetsFlag(cmd.addSnippets), util_1.getThreadsFlag(cmd.threads, logger), config, logger);
await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, cmd.githubAuth, parseGithubUrl(cmd.githubUrl), cmd.upload, "runner", outputDir, util_1.getMemoryFlag(cmd.ram), util_1.getAddSnippetsFlag(cmd.addSnippets), util_1.getThreadsFlag(cmd.threads, logger), config, logger);
}
catch (e) {
logger.error('Analyze failed');
logger.error("Analyze failed");
logger.error(e);
process.exitCode = 1;
}
});
program
.command('upload')
.description('Uploads a SARIF file, or all SARIF files from a directory, to code scanning')
.requiredOption('--sarif-file <file>', 'SARIF file to upload, or a directory containing multiple SARIF files. (Required)')
.requiredOption('--repository <repository>', 'Repository name. (Required)')
.requiredOption('--commit <commit>', 'SHA of commit that was analyzed. (Required)')
.requiredOption('--ref <ref>', 'Name of ref that was analyzed. (Required)')
.requiredOption('--github-url <url>', 'URL of GitHub instance. (Required)')
.requiredOption('--github-auth <auth>', 'GitHub Apps token or personal access token. (Required)')
.option('--checkout-path <path>', 'Checkout path. Default is the current working directory.')
.option('--debug', 'Print more verbose output', false)
.command("upload")
.description("Uploads a SARIF file, or all SARIF files from a directory, to code scanning")
.requiredOption("--sarif-file <file>", "SARIF file to upload, or a directory containing multiple SARIF files. (Required)")
.requiredOption("--repository <repository>", "Repository name. (Required)")
.requiredOption("--commit <commit>", "SHA of commit that was analyzed. (Required)")
.requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
.option("--debug", "Print more verbose output", false)
.action(async (cmd) => {
const logger = logging_1.getRunnerLogger(cmd.debug);
try {
await upload_lib.upload(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, cmd.githubAuth, parseGithubUrl(cmd.githubUrl), 'runner', logger);
await upload_lib.upload(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, cmd.githubAuth, parseGithubUrl(cmd.githubUrl), "runner", logger);
}
catch (e) {
logger.error('Upload failed');
logger.error("Upload failed");
logger.error(e);
process.exitCode = 1;
}
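
The analyze and upload subcommands above are purely mechanical quote changes, but the init block earlier in this file also writes per-shell environment files, escaping the first "$" so that ${LIB} survives unexpanded in the sh variant. A minimal sketch of that transformation, assuming the mapping shown in the diff; the helper name and the example path are mine:

function toShellEnvFile(env) {
  return Object.entries(env)
    // String.replace with a string pattern escapes only the first "$",
    // which is all the tracer variables need.
    .map(([key, value]) => `export ${key}="${value.replace("$", "\\$")}"`)
    .join("\n");
}

console.log(toShellEnvFile({ LD_PRELOAD: "/opt/codeql/tools/linux64/${LIB}trace.so" }));
// export LD_PRELOAD="/opt/codeql/tools/linux64/\${LIB}trace.so"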

File diff suppressed because one or more lines are too long

View File

@@ -1,10 +1,10 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
exports.ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
// The time at which the first action (normally init) started executing.
// If a workflow invokes a different action without first invoking the init
// action (i.e. the upload action is being used by a third-party integrator)
// then this variable will be assigned the start time of the action invoked
// rather that the init action.
exports.CODEQL_WORKFLOW_STARTED_AT = 'CODEQL_WORKFLOW_STARTED_AT';
exports.CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";
//# sourceMappingURL=shared-environment.js.map
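
The two constants above only name environment variables; nothing else in this file changes. A hypothetical illustration of how such a start-time marker can be used; this usage is not shown anywhere in the commit:

const sharedEnv = require("./shared-environment");

// Hypothetical: record the workflow start time once, so later actions can reuse it.
if (process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT] === undefined) {
  process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT] = new Date().toISOString();
}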

12
lib/testing-utils.js generated
View File

@@ -19,19 +19,19 @@ function wrapOutput(context) {
// write(str: Uint8Array | string, encoding?: string, cb?: (err?: Error) => void): boolean;
return (chunk, encoding, cb) => {
// Work out which method overload we are in
if (cb === undefined && typeof encoding === 'function') {
if (cb === undefined && typeof encoding === "function") {
cb = encoding;
encoding = undefined;
}
// Record the output
if (typeof chunk === 'string') {
if (typeof chunk === "string") {
context.testOutput += chunk;
}
else {
context.testOutput += new TextDecoder(encoding || 'utf-8').decode(chunk);
context.testOutput += new TextDecoder(encoding || "utf-8").decode(chunk);
}
// Satisfy contract by calling callback when done
if (cb !== undefined && typeof cb === 'function') {
if (cb !== undefined && typeof cb === "function") {
cb();
}
return true;
@@ -39,7 +39,7 @@ function wrapOutput(context) {
}
function setupTests(test) {
const typedTest = test;
typedTest.beforeEach(t => {
typedTest.beforeEach((t) => {
// Set an empty CodeQL object so that all method calls will fail
// unless the test explicitly sets one up.
CodeQL.setCodeQL({});
@@ -57,7 +57,7 @@ function setupTests(test) {
t.context.env = {};
Object.assign(t.context.env, process.env);
});
typedTest.afterEach.always(t => {
typedTest.afterEach.always((t) => {
// Restore stdout and stderr
// The captured output is only replayed if the test failed
process.stdout.write = t.context.stdoutWrite;
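
The wrapOutput change above is only about callback parentheses, but the overload check it wraps is easy to misread: Node's stream write can be called with or without an encoding, so when the second argument is a function it is really the callback. For illustration, the three call shapes the wrapper has to accept:

process.stdout.write("hello\n");                     // (chunk)
process.stdout.write("hello\n", () => {});           // (chunk, cb): the "encoding" slot holds the callback
process.stdout.write("hello\n", "utf-8", () => {});  // (chunk, encoding, cb)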

View File

@@ -1 +1 @@
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AACA,kDAA0B;AAE1B,iDAAmC;AAInC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CAAC,KAA0B,EAAE,QAAiB,EAAE,EAA0B,EAAW,EAAE;QAC5F,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACvB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;QAC7B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,eAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC"}
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AACA,kDAA0B;AAE1B,iDAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,eAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC"}

76
lib/tracer-config.js generated
View File

@@ -11,35 +11,38 @@ const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const languages_1 = require("./languages");
const util = __importStar(require("./util"));
const CRITICAL_TRACER_VARS = new Set(['SEMMLE_PRELOAD_libtrace',
const CRITICAL_TRACER_VARS = new Set([
"SEMMLE_PRELOAD_libtrace",
,
'SEMMLE_RUNNER',
"SEMMLE_RUNNER",
,
'SEMMLE_COPY_EXECUTABLES_ROOT',
"SEMMLE_COPY_EXECUTABLES_ROOT",
,
'SEMMLE_DEPTRACE_SOCKET',
"SEMMLE_DEPTRACE_SOCKET",
,
'SEMMLE_JAVA_TOOL_OPTIONS'
"SEMMLE_JAVA_TOOL_OPTIONS",
]);
async function getTracerConfigForLanguage(codeql, config, language) {
const env = await codeql.getTracerEnv(util.getCodeQLDatabasePath(config.tempDir, language));
const spec = env['ODASA_TRACER_CONFIGURATION'];
const spec = env["ODASA_TRACER_CONFIGURATION"];
const info = { spec, env: {} };
// Extract critical tracer variables from the environment
for (let entry of Object.entries(env)) {
for (const entry of Object.entries(env)) {
const key = entry[0];
const value = entry[1];
// skip ODASA_TRACER_CONFIGURATION as it is handled separately
if (key === 'ODASA_TRACER_CONFIGURATION') {
if (key === "ODASA_TRACER_CONFIGURATION") {
continue;
}
// skip undefined values
if (typeof value === 'undefined') {
if (typeof value === "undefined") {
continue;
}
// Keep variables that do not exist in current environment. In addition always keep
// critical and CODEQL_ variables
if (typeof process.env[key] === 'undefined' || CRITICAL_TRACER_VARS.has(key) || key.startsWith('CODEQL_')) {
if (typeof process.env[key] === "undefined" ||
CRITICAL_TRACER_VARS.has(key) ||
key.startsWith("CODEQL_")) {
info.env[key] = value;
}
}
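
The reflowed condition above keeps a tracer variable when it is absent from the current environment, listed as critical, or CODEQL_-prefixed. The same predicate written out as a standalone sketch (the function name is mine):

const CRITICAL_TRACER_VARS = new Set([
  "SEMMLE_PRELOAD_libtrace",
  "SEMMLE_RUNNER",
  "SEMMLE_COPY_EXECUTABLES_ROOT",
  "SEMMLE_DEPTRACE_SOCKET",
  "SEMMLE_JAVA_TOOL_OPTIONS",
]);

function shouldKeepTracerVar(key) {
  return (
    typeof process.env[key] === "undefined" ||
    CRITICAL_TRACER_VARS.has(key) ||
    key.startsWith("CODEQL_")
  );
}
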
@@ -54,17 +57,16 @@ function concatTracerConfigs(tracerConfigs, config) {
let copyExecutables = false;
let envSize = 0;
for (const v of Object.values(tracerConfigs)) {
for (let e of Object.entries(v.env)) {
for (const e of Object.entries(v.env)) {
const name = e[0];
const value = e[1];
// skip SEMMLE_COPY_EXECUTABLES_ROOT as it is handled separately
if (name === 'SEMMLE_COPY_EXECUTABLES_ROOT') {
if (name === "SEMMLE_COPY_EXECUTABLES_ROOT") {
copyExecutables = true;
}
else if (name in env) {
if (env[name] !== value) {
throw Error('Incompatible values in environment parameter ' +
name + ': ' + env[name] + ' and ' + value);
throw Error(`Incompatible values in environment parameter ${name}: ${env[name]} and ${value}`);
}
}
else {
@@ -74,44 +76,50 @@ function concatTracerConfigs(tracerConfigs, config) {
}
}
// Concatenate spec files into a new spec file
let languages = Object.keys(tracerConfigs);
const cppIndex = languages.indexOf('cpp');
const languages = Object.keys(tracerConfigs);
const cppIndex = languages.indexOf("cpp");
// Make sure cpp is the last language, if it's present since it must be concatenated last
if (cppIndex !== -1) {
let lastLang = languages[languages.length - 1];
const lastLang = languages[languages.length - 1];
languages[languages.length - 1] = languages[cppIndex];
languages[cppIndex] = lastLang;
}
let totalLines = [];
const totalLines = [];
let totalCount = 0;
for (let lang of languages) {
const lines = fs.readFileSync(tracerConfigs[lang].spec, 'utf8').split(/\r?\n/);
for (const lang of languages) {
const lines = fs
.readFileSync(tracerConfigs[lang].spec, "utf8")
.split(/\r?\n/);
const count = parseInt(lines[1], 10);
totalCount += count;
totalLines.push(...lines.slice(2));
}
const newLogFilePath = path.resolve(config.tempDir, 'compound-build-tracer.log');
const spec = path.resolve(config.tempDir, 'compound-spec');
const compoundTempFolder = path.resolve(config.tempDir, 'compound-temp');
const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];
const newLogFilePath = path.resolve(config.tempDir, "compound-build-tracer.log");
const spec = path.resolve(config.tempDir, "compound-spec");
const compoundTempFolder = path.resolve(config.tempDir, "compound-temp");
const newSpecContent = [
newLogFilePath,
totalCount.toString(10),
...totalLines,
];
if (copyExecutables) {
env['SEMMLE_COPY_EXECUTABLES_ROOT'] = compoundTempFolder;
env["SEMMLE_COPY_EXECUTABLES_ROOT"] = compoundTempFolder;
envSize += 1;
}
fs.writeFileSync(spec, newSpecContent.join('\n'));
fs.writeFileSync(spec, newSpecContent.join("\n"));
// Prepare the content of the compound environment file
let buffer = Buffer.alloc(4);
buffer.writeInt32LE(envSize, 0);
for (let e of Object.entries(env)) {
for (const e of Object.entries(env)) {
const key = e[0];
const value = e[1];
const lineBuffer = new Buffer(key + '=' + value + '\0', 'utf8');
const lineBuffer = new Buffer(`${key}=${value}\0`, "utf8");
const sizeBuffer = Buffer.alloc(4);
sizeBuffer.writeInt32LE(lineBuffer.length, 0);
buffer = Buffer.concat([buffer, sizeBuffer, lineBuffer]);
}
// Write the compound environment
const envPath = spec + '.environment';
const envPath = `${spec}.environment`;
fs.writeFileSync(envPath, buffer);
return { env, spec };
}
@@ -129,13 +137,13 @@ async function getCombinedTracerConfig(config, codeql) {
}
const mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
// Add a couple more variables
mainTracerConfig.env['ODASA_TRACER_CONFIGURATION'] = mainTracerConfig.spec;
mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
const codeQLDir = path.dirname(codeql.getPath());
if (process.platform === 'darwin') {
mainTracerConfig.env['DYLD_INSERT_LIBRARIES'] = path.join(codeQLDir, 'tools', 'osx64', 'libtrace.dylib');
if (process.platform === "darwin") {
mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
}
else if (process.platform !== 'win32') {
mainTracerConfig.env['LD_PRELOAD'] = path.join(codeQLDir, 'tools', 'linux64', '${LIB}trace.so');
else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
}
return mainTracerConfig;
}
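
concatTracerConfigs above also serialises the combined environment into a small binary file: a little-endian int32 entry count, then for each variable an int32 byte length followed by "KEY=value" plus a trailing NUL, encoded as UTF-8. A sketch of that encoding; the function name is mine, and Buffer.from replaces the deprecated new Buffer seen in the compiled output:

function encodeTracerEnvironment(env) {
  let buffer = Buffer.alloc(4);
  buffer.writeInt32LE(Object.keys(env).length, 0);
  for (const [key, value] of Object.entries(env)) {
    const lineBuffer = Buffer.from(`${key}=${value}\0`, "utf8");
    const sizeBuffer = Buffer.alloc(4);
    sizeBuffer.writeInt32LE(lineBuffer.length, 0);
    buffer = Buffer.concat([buffer, sizeBuffer, lineBuffer]);
  }
  return buffer;
}

// e.g. fs.writeFileSync(`${spec}.environment`, encodeTracerEnvironment(env));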

View File

@@ -1 +1 @@
{"version":3,"file":"tracer-config.js","sourceRoot":"","sources":["../src/tracer-config.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAI7B,2CAAyD;AACzD,6CAA+B;AAO/B,MAAM,oBAAoB,GAAG,IAAI,GAAG,CAClC,CAAC,yBAAyB;IACxB,AADyB;IACvB,eAAe;IACjB,AADkB;IAChB,8BAA8B;IAChC,AADiC;IAC/B,wBAAwB;IAC1B,AAD2B;IACzB,0BAA0B;CAC7B,CAAC,CAAC;AAEE,KAAK,UAAU,0BAA0B,CAC9C,MAAc,EACd,MAA0B,EAC1B,QAAkB;IAElB,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,YAAY,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC,CAAC;IAE5F,MAAM,IAAI,GAAG,GAAG,CAAC,4BAA4B,CAAC,CAAC;IAC/C,MAAM,IAAI,GAAiB,EAAE,IAAI,EAAE,GAAG,EAAE,EAAE,EAAE,CAAC;IAE7C,yDAAyD;IACzD,KAAK,IAAI,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACrC,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QACrB,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QACvB,8DAA8D;QAC9D,IAAI,GAAG,KAAK,4BAA4B,EAAE;YACxC,SAAS;SACV;QACD,wBAAwB;QACxB,IAAI,OAAO,KAAK,KAAK,WAAW,EAAE;YAChC,SAAS;SACV;QACD,mFAAmF;QACnF,iCAAiC;QACjC,IAAI,OAAO,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,WAAW,IAAI,oBAAoB,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YACzG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;SACvB;KACF;IACD,OAAO,IAAI,CAAC;AACd,CAAC;AA7BD,gEA6BC;AAED,SAAgB,mBAAmB,CACjC,aAA+C,EAC/C,MAA0B;IAE1B,iGAAiG;IACjG,0FAA0F;IAE1F,yBAAyB;IACzB,MAAM,GAAG,GAA+B,EAAE,CAAC;IAC3C,IAAI,eAAe,GAAG,KAAK,CAAC;IAC5B,IAAI,OAAO,GAAG,CAAC,CAAC;IAChB,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE;QAC5C,KAAK,IAAI,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;YACnC,MAAM,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;YAClB,MAAM,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;YACnB,gEAAgE;YAChE,IAAI,IAAI,KAAK,8BAA8B,EAAE;gBAC3C,eAAe,GAAG,IAAI,CAAC;aACxB;iBAAM,IAAI,IAAI,IAAI,GAAG,EAAE;gBACtB,IAAI,GAAG,CAAC,IAAI,CAAC,KAAK,KAAK,EAAE;oBACvB,MAAM,KAAK,CAAC,+CAA+C;wBACzD,IAAI,GAAG,IAAI,GAAG,GAAG,CAAC,IAAI,CAAC,GAAG,OAAO,GAAG,KAAK,CAAC,CAAC;iBAC9C;aACF;iBAAM;gBACL,GAAG,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC;gBAClB,OAAO,IAAI,CAAC,CAAC;aACd;SACF;KACF;IAED,8CAA8C;IAC9C,IAAI,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;IAC3C,MAAM,QAAQ,GAAG,SAAS,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IAC1C,yFAAyF;IACzF,IAAI,QAAQ,KAAK,CAAC,CAAC,EAAE;QACnB,IAAI,QAAQ,GAAG,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QAC/C,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC,QAAQ,CAAC,CAAC;QACtD,SAAS,CAAC,QAAQ,CAAC,GAAG,QAAQ,CAAC;KAChC;IAED,IAAI,UAAU,GAAa,EAAE,CAAC;IAC9B,IAAI,UAAU,GAAG,CAAC,CAAC;IACnB,KAAK,IAAI,IAAI,IAAI,SAAS,EAAE;QAC1B,MAAM,KAAK,GAAG,EAAE,CAAC,YAAY,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC/E,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QACrC,UAAU,IAAI,KAAK,CAAC;QACpB,UAAU,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;KACpC;IAED,MAAM,cAAc,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,2BAA2B,CAAC,CAAC;IACjF,MAAM,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;IAC3D,MAAM,kBAAkB,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;IACzE,MAAM,cAAc,GAAG,CAAC,cAAc,EAAE,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,GAAG,UAAU,CAAC,CAAC;IAEhF,IAAI,eAAe,EAAE;QACnB,GAAG,CAAC,8BAA8B,CAAC,GAAG,kBAAkB,CAAC;QACzD,OAAO,IAAI,CAAC,CAAC;KACd;IAED,EAAE,CAAC,aAAa,CAAC,IAAI,EAAE,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;IAElD,uDAAuD;IACvD,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAC7B,MAAM,CAAC,YAAY,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;IAChC,KAAK,IAAI,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACjC,MAAM,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACjB,MAAM,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACnB,MAAM,UAAU,GAAG,IAAI,MAAM,CAAC,GAAG,GAAG,GAAG,GAAG,KAAK,GAAG,IAAI,EAAE,MAAM,CAAC,CAAC;QAChE,MAAM,UAAU
,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACnC,UAAU,CAAC,YAAY,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;QAC9C,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC,CAAC;KAC1D;IACD,iCAAiC;IACjC,MAAM,OAAO,GAAG,IAAI,GAAG,cAAc,CAAC;IACtC,EAAE,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IAElC,OAAO,EAAE,GAAG,EAAE,IAAI,EAAE,CAAC;AACvB,CAAC;AA7ED,kDA6EC;AAEM,KAAK,UAAU,uBAAuB,CAC3C,MAA0B,EAC1B,MAAc;IAEd,kEAAkE;IAClE,MAAM,eAAe,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;IAClE,IAAI,eAAe,CAAC,MAAM,KAAK,CAAC,EAAE;QAChC,OAAO,SAAS,CAAC;KAClB;IAED,uDAAuD;IACvD,MAAM,qBAAqB,GAAqC,EAAE,CAAC;IACnE,KAAK,MAAM,QAAQ,IAAI,eAAe,EAAE;QACtC,qBAAqB,CAAC,QAAQ,CAAC,GAAG,MAAM,0BAA0B,CAAC,MAAM,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;KAC9F;IACD,MAAM,gBAAgB,GAAG,mBAAmB,CAAC,qBAAqB,EAAE,MAAM,CAAC,CAAC;IAE5E,8BAA8B;IAC9B,gBAAgB,CAAC,GAAG,CAAC,4BAA4B,CAAC,GAAG,gBAAgB,CAAC,IAAI,CAAC;IAC3E,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC;IACjD,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;QACjC,gBAAgB,CAAC,GAAG,CAAC,uBAAuB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC;KAC1G;SAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QACvC,gBAAgB,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,OAAO,EAAE,SAAS,EAAE,gBAAgB,CAAC,CAAC;KACjG;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AA3BD,0DA2BC"}
{"version":3,"file":"tracer-config.js","sourceRoot":"","sources":["../src/tracer-config.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAI7B,2CAAyD;AACzD,6CAA+B;AAO/B,MAAM,oBAAoB,GAAG,IAAI,GAAG,CAAC;IACnC,yBAAyB;IACzB,AAD0B;IAE1B,eAAe;IACf,AADgB;IAEhB,8BAA8B;IAC9B,AAD+B;IAE/B,wBAAwB;IACxB,AADyB;IAEzB,0BAA0B;CAC3B,CAAC,CAAC;AAEI,KAAK,UAAU,0BAA0B,CAC9C,MAAc,EACd,MAA0B,EAC1B,QAAkB;IAElB,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,YAAY,CACnC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,CACrD,CAAC;IAEF,MAAM,IAAI,GAAG,GAAG,CAAC,4BAA4B,CAAC,CAAC;IAC/C,MAAM,IAAI,GAAiB,EAAE,IAAI,EAAE,GAAG,EAAE,EAAE,EAAE,CAAC;IAE7C,yDAAyD;IACzD,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACvC,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QACrB,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QACvB,8DAA8D;QAC9D,IAAI,GAAG,KAAK,4BAA4B,EAAE;YACxC,SAAS;SACV;QACD,wBAAwB;QACxB,IAAI,OAAO,KAAK,KAAK,WAAW,EAAE;YAChC,SAAS;SACV;QACD,mFAAmF;QACnF,iCAAiC;QACjC,IACE,OAAO,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,WAAW;YACvC,oBAAoB,CAAC,GAAG,CAAC,GAAG,CAAC;YAC7B,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,EACzB;YACA,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;SACvB;KACF;IACD,OAAO,IAAI,CAAC;AACd,CAAC;AAnCD,gEAmCC;AAED,SAAgB,mBAAmB,CACjC,aAA+C,EAC/C,MAA0B;IAE1B,iGAAiG;IACjG,0FAA0F;IAE1F,yBAAyB;IACzB,MAAM,GAAG,GAA8B,EAAE,CAAC;IAC1C,IAAI,eAAe,GAAG,KAAK,CAAC;IAC5B,IAAI,OAAO,GAAG,CAAC,CAAC;IAChB,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE;QAC5C,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE;YACrC,MAAM,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;YAClB,MAAM,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;YACnB,gEAAgE;YAChE,IAAI,IAAI,KAAK,8BAA8B,EAAE;gBAC3C,eAAe,GAAG,IAAI,CAAC;aACxB;iBAAM,IAAI,IAAI,IAAI,GAAG,EAAE;gBACtB,IAAI,GAAG,CAAC,IAAI,CAAC,KAAK,KAAK,EAAE;oBACvB,MAAM,KAAK,CACT,gDAAgD,IAAI,KAAK,GAAG,CAAC,IAAI,CAAC,QAAQ,KAAK,EAAE,CAClF,CAAC;iBACH;aACF;iBAAM;gBACL,GAAG,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC;gBAClB,OAAO,IAAI,CAAC,CAAC;aACd;SACF;KACF;IAED,8CAA8C;IAC9C,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;IAC7C,MAAM,QAAQ,GAAG,SAAS,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IAC1C,yFAAyF;IACzF,IAAI,QAAQ,KAAK,CAAC,CAAC,EAAE;QACnB,MAAM,QAAQ,GAAG,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QACjD,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC,QAAQ,CAAC,CAAC;QACtD,SAAS,CAAC,QAAQ,CAAC,GAAG,QAAQ,CAAC;KAChC;IAED,MAAM,UAAU,GAAa,EAAE,CAAC;IAChC,IAAI,UAAU,GAAG,CAAC,CAAC;IACnB,KAAK,MAAM,IAAI,IAAI,SAAS,EAAE;QAC5B,MAAM,KAAK,GAAG,EAAE;aACb,YAAY,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,MAAM,CAAC;aAC9C,KAAK,CAAC,OAAO,CAAC,CAAC;QAClB,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QACrC,UAAU,IAAI,KAAK,CAAC;QACpB,UAAU,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;KACpC;IAED,MAAM,cAAc,GAAG,IAAI,CAAC,OAAO,CACjC,MAAM,CAAC,OAAO,EACd,2BAA2B,CAC5B,CAAC;IACF,MAAM,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;IAC3D,MAAM,kBAAkB,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;IACzE,MAAM,cAAc,GAAG;QACrB,cAAc;QACd,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;QACvB,GAAG,UAAU;KACd,CAAC;IAEF,IAAI,eAAe,EAAE;QACnB,GAAG,CAAC,8BAA8B,CAAC,GAAG,kBAAkB,CAAC;QACzD,OAAO,IAAI,CAAC,CAAC;KACd;IAED,EAAE,CAAC,aAAa,CAAC,IAAI,EAAE,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;IAElD,uDAAuD;IACvD,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAC7B,MAAM,CAAC,YAAY,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;IAChC,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACnC,MAAM,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACjB,MAAM,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACnB,MAAM,UAAU,GAAG,IAAI,MAAM,CAAC,GAAG,GAAG,IAAI,KAAK,IAAI,EAAE,MAAM,CAAC,CAAC;QAC3D,MAAM,UAAU,GAAG,MAAM,CAAC,KAAK,CAAC,C
AAC,CAAC,CAAC;QACnC,UAAU,CAAC,YAAY,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;QAC9C,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC,CAAC;KAC1D;IACD,iCAAiC;IACjC,MAAM,OAAO,GAAG,GAAG,IAAI,cAAc,CAAC;IACtC,EAAE,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IAElC,OAAO,EAAE,GAAG,EAAE,IAAI,EAAE,CAAC;AACvB,CAAC;AAvFD,kDAuFC;AAEM,KAAK,UAAU,uBAAuB,CAC3C,MAA0B,EAC1B,MAAc;IAEd,kEAAkE;IAClE,MAAM,eAAe,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;IAClE,IAAI,eAAe,CAAC,MAAM,KAAK,CAAC,EAAE;QAChC,OAAO,SAAS,CAAC;KAClB;IAED,uDAAuD;IACvD,MAAM,qBAAqB,GAAqC,EAAE,CAAC;IACnE,KAAK,MAAM,QAAQ,IAAI,eAAe,EAAE;QACtC,qBAAqB,CAAC,QAAQ,CAAC,GAAG,MAAM,0BAA0B,CAChE,MAAM,EACN,MAAM,EACN,QAAQ,CACT,CAAC;KACH;IACD,MAAM,gBAAgB,GAAG,mBAAmB,CAAC,qBAAqB,EAAE,MAAM,CAAC,CAAC;IAE5E,8BAA8B;IAC9B,gBAAgB,CAAC,GAAG,CAAC,4BAA4B,CAAC,GAAG,gBAAgB,CAAC,IAAI,CAAC;IAC3E,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC;IACjD,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;QACjC,gBAAgB,CAAC,GAAG,CAAC,uBAAuB,CAAC,GAAG,IAAI,CAAC,IAAI,CACvD,SAAS,EACT,OAAO,EACP,OAAO,EACP,gBAAgB,CACjB,CAAC;KACH;SAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QACvC,gBAAgB,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,IAAI,CAAC,IAAI,CAC5C,SAAS,EACT,OAAO,EACP,SAAS,EACT,gBAAgB,CACjB,CAAC;KACH;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAzCD,0DAyCC"}

View File

@@ -28,249 +28,249 @@ function getTestConfig(tmpDir) {
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: '',
codeQLCmd: "",
};
}
// A very minimal setup
ava_1.default('getTracerConfigForLanguage - minimal setup', async (t) => {
ava_1.default("getTracerConfigForLanguage - minimal setup", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const codeQL = codeql_1.setCodeQL({
getTracerEnv: async function () {
async getTracerEnv() {
return {
'ODASA_TRACER_CONFIGURATION': 'abc',
'foo': 'bar'
ODASA_TRACER_CONFIGURATION: "abc",
foo: "bar",
};
},
});
const result = await tracer_config_1.getTracerConfigForLanguage(codeQL, config, languages_1.Language.javascript);
t.deepEqual(result, { spec: 'abc', env: { 'foo': 'bar' } });
t.deepEqual(result, { spec: "abc", env: { foo: "bar" } });
});
});
// Existing vars should not be overwritten, unless they are critical or prefixed with CODEQL_
ava_1.default('getTracerConfigForLanguage - existing / critical vars', async (t) => {
ava_1.default("getTracerConfigForLanguage - existing / critical vars", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
// Set up some variables in the environment
process.env['foo'] = 'abc';
process.env['SEMMLE_PRELOAD_libtrace'] = 'abc';
process.env['SEMMLE_RUNNER'] = 'abc';
process.env['SEMMLE_COPY_EXECUTABLES_ROOT'] = 'abc';
process.env['SEMMLE_DEPTRACE_SOCKET'] = 'abc';
process.env['SEMMLE_JAVA_TOOL_OPTIONS'] = 'abc';
process.env['SEMMLE_DEPTRACE_SOCKET'] = 'abc';
process.env['CODEQL_VAR'] = 'abc';
process.env["foo"] = "abc";
process.env["SEMMLE_PRELOAD_libtrace"] = "abc";
process.env["SEMMLE_RUNNER"] = "abc";
process.env["SEMMLE_COPY_EXECUTABLES_ROOT"] = "abc";
process.env["SEMMLE_DEPTRACE_SOCKET"] = "abc";
process.env["SEMMLE_JAVA_TOOL_OPTIONS"] = "abc";
process.env["SEMMLE_DEPTRACE_SOCKET"] = "abc";
process.env["CODEQL_VAR"] = "abc";
// Now CodeQL returns all these variables, and one more, with different values
const codeQL = codeql_1.setCodeQL({
getTracerEnv: async function () {
async getTracerEnv() {
return {
'ODASA_TRACER_CONFIGURATION': 'abc',
'foo': 'bar',
'baz': 'qux',
'SEMMLE_PRELOAD_libtrace': 'SEMMLE_PRELOAD_libtrace',
'SEMMLE_RUNNER': 'SEMMLE_RUNNER',
'SEMMLE_COPY_EXECUTABLES_ROOT': 'SEMMLE_COPY_EXECUTABLES_ROOT',
'SEMMLE_DEPTRACE_SOCKET': 'SEMMLE_DEPTRACE_SOCKET',
'SEMMLE_JAVA_TOOL_OPTIONS': 'SEMMLE_JAVA_TOOL_OPTIONS',
'CODEQL_VAR': 'CODEQL_VAR',
ODASA_TRACER_CONFIGURATION: "abc",
foo: "bar",
baz: "qux",
SEMMLE_PRELOAD_libtrace: "SEMMLE_PRELOAD_libtrace",
SEMMLE_RUNNER: "SEMMLE_RUNNER",
SEMMLE_COPY_EXECUTABLES_ROOT: "SEMMLE_COPY_EXECUTABLES_ROOT",
SEMMLE_DEPTRACE_SOCKET: "SEMMLE_DEPTRACE_SOCKET",
SEMMLE_JAVA_TOOL_OPTIONS: "SEMMLE_JAVA_TOOL_OPTIONS",
CODEQL_VAR: "CODEQL_VAR",
};
},
});
const result = await tracer_config_1.getTracerConfigForLanguage(codeQL, config, languages_1.Language.javascript);
t.deepEqual(result, {
spec: 'abc',
spec: "abc",
env: {
// Should contain all variables except 'foo', because that already existed in the
// environment with a different value, and is not deemed a "critical" variable.
'baz': 'qux',
'SEMMLE_PRELOAD_libtrace': 'SEMMLE_PRELOAD_libtrace',
'SEMMLE_RUNNER': 'SEMMLE_RUNNER',
'SEMMLE_COPY_EXECUTABLES_ROOT': 'SEMMLE_COPY_EXECUTABLES_ROOT',
'SEMMLE_DEPTRACE_SOCKET': 'SEMMLE_DEPTRACE_SOCKET',
'SEMMLE_JAVA_TOOL_OPTIONS': 'SEMMLE_JAVA_TOOL_OPTIONS',
'CODEQL_VAR': 'CODEQL_VAR',
}
baz: "qux",
SEMMLE_PRELOAD_libtrace: "SEMMLE_PRELOAD_libtrace",
SEMMLE_RUNNER: "SEMMLE_RUNNER",
SEMMLE_COPY_EXECUTABLES_ROOT: "SEMMLE_COPY_EXECUTABLES_ROOT",
SEMMLE_DEPTRACE_SOCKET: "SEMMLE_DEPTRACE_SOCKET",
SEMMLE_JAVA_TOOL_OPTIONS: "SEMMLE_JAVA_TOOL_OPTIONS",
CODEQL_VAR: "CODEQL_VAR",
},
});
});
});
ava_1.default('concatTracerConfigs - minimal configs correctly combined', async (t) => {
ava_1.default("concatTracerConfigs - minimal configs correctly combined", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec1 = path.join(tmpDir, 'spec1');
fs.writeFileSync(spec1, 'foo.log\n2\nabc\ndef');
const spec1 = path.join(tmpDir, "spec1");
fs.writeFileSync(spec1, "foo.log\n2\nabc\ndef");
const tc1 = {
spec: spec1,
env: {
'a': 'a',
'b': 'b',
}
a: "a",
b: "b",
},
};
const spec2 = path.join(tmpDir, 'spec2');
fs.writeFileSync(spec2, 'foo.log\n1\nghi');
const spec2 = path.join(tmpDir, "spec2");
fs.writeFileSync(spec2, "foo.log\n1\nghi");
const tc2 = {
spec: spec2,
env: {
'c': 'c',
}
c: "c",
},
};
const result = tracer_config_1.concatTracerConfigs({ 'javascript': tc1, 'python': tc2 }, config);
const result = tracer_config_1.concatTracerConfigs({ javascript: tc1, python: tc2 }, config);
t.deepEqual(result, {
spec: path.join(tmpDir, 'compound-spec'),
spec: path.join(tmpDir, "compound-spec"),
env: {
'a': 'a',
'b': 'b',
'c': 'c',
}
a: "a",
b: "b",
c: "c",
},
});
t.true(fs.existsSync(result.spec));
t.deepEqual(fs.readFileSync(result.spec, 'utf8'), path.join(tmpDir, 'compound-build-tracer.log') + '\n3\nabc\ndef\nghi');
t.deepEqual(fs.readFileSync(result.spec, "utf8"), `${path.join(tmpDir, "compound-build-tracer.log")}\n3\nabc\ndef\nghi`);
});
});
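The fixtures above make the tracer spec layout visible: line 1 is the path of the build tracer log, line 2 is the number of entries that follow, and the remaining lines are the entries themselves; concatenation sums the counts and appends all entries. A minimal sketch of that concatenation, inferred from the fixtures and the expected compound file rather than taken from the compiled source:
import * as fs from "fs";
import * as path from "path";
// Sketch only: the spec layout inferred from the test above is
// [tracer log path, entry count, ...entries].
function concatSpecsSketch(specPaths: string[], outDir: string): string {
  let totalEntries = 0;
  const entries: string[] = [];
  for (const specPath of specPaths) {
    const lines = fs.readFileSync(specPath, "utf8").split("\n");
    totalEntries += parseInt(lines[1], 10); // line 2: number of entries in this spec
    entries.push(...lines.slice(2));
  }
  const compoundLog = path.join(outDir, "compound-build-tracer.log");
  const compoundSpec = path.join(outDir, "compound-spec");
  fs.writeFileSync(compoundSpec, [compoundLog, totalEntries, ...entries].join("\n"));
  return compoundSpec;
}
// For the two fixtures above this writes "<outDir>/compound-build-tracer.log\n3\nabc\ndef\nghi",
// matching the expected compound spec contents.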
ava_1.default('concatTracerConfigs - conflicting env vars', async (t) => {
ava_1.default("concatTracerConfigs - conflicting env vars", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec = path.join(tmpDir, 'spec');
fs.writeFileSync(spec, 'foo.log\n0');
const spec = path.join(tmpDir, "spec");
fs.writeFileSync(spec, "foo.log\n0");
// Ok if env vars have the same name and the same value
t.deepEqual(tracer_config_1.concatTracerConfigs({
'javascript': { spec: spec, env: { 'a': 'a', 'b': 'b' } },
'python': { spec: spec, env: { 'b': 'b', 'c': 'c' } },
javascript: { spec, env: { a: "a", b: "b" } },
python: { spec, env: { b: "b", c: "c" } },
}, config).env, {
'a': 'a',
'b': 'b',
'c': 'c',
a: "a",
b: "b",
c: "c",
});
// Throws if env vars have same name but different values
const e = t.throws(() => tracer_config_1.concatTracerConfigs({
'javascript': { spec: spec, env: { 'a': 'a', 'b': 'b' } },
'python': { spec: spec, env: { 'b': 'c' } },
javascript: { spec, env: { a: "a", b: "b" } },
python: { spec, env: { b: "c" } },
}, config));
t.deepEqual(e.message, 'Incompatible values in environment parameter b: b and c');
t.deepEqual(e.message, "Incompatible values in environment parameter b: b and c");
});
});
ava_1.default('concatTracerConfigs - cpp spec lines come last if present', async (t) => {
ava_1.default("concatTracerConfigs - cpp spec lines come last if present", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec1 = path.join(tmpDir, 'spec1');
fs.writeFileSync(spec1, 'foo.log\n2\nabc\ndef');
const spec1 = path.join(tmpDir, "spec1");
fs.writeFileSync(spec1, "foo.log\n2\nabc\ndef");
const tc1 = {
spec: spec1,
env: {
'a': 'a',
'b': 'b',
}
a: "a",
b: "b",
},
};
const spec2 = path.join(tmpDir, 'spec2');
fs.writeFileSync(spec2, 'foo.log\n1\nghi');
const spec2 = path.join(tmpDir, "spec2");
fs.writeFileSync(spec2, "foo.log\n1\nghi");
const tc2 = {
spec: spec2,
env: {
'c': 'c',
}
c: "c",
},
};
const result = tracer_config_1.concatTracerConfigs({ 'cpp': tc1, 'python': tc2 }, config);
const result = tracer_config_1.concatTracerConfigs({ cpp: tc1, python: tc2 }, config);
t.deepEqual(result, {
spec: path.join(tmpDir, 'compound-spec'),
spec: path.join(tmpDir, "compound-spec"),
env: {
'a': 'a',
'b': 'b',
'c': 'c',
}
a: "a",
b: "b",
c: "c",
},
});
t.true(fs.existsSync(result.spec));
t.deepEqual(fs.readFileSync(result.spec, 'utf8'), path.join(tmpDir, 'compound-build-tracer.log') + '\n3\nghi\nabc\ndef');
t.deepEqual(fs.readFileSync(result.spec, "utf8"), `${path.join(tmpDir, "compound-build-tracer.log")}\n3\nghi\nabc\ndef`);
});
});
ava_1.default('concatTracerConfigs - SEMMLE_COPY_EXECUTABLES_ROOT is updated to point to compound spec', async (t) => {
ava_1.default("concatTracerConfigs - SEMMLE_COPY_EXECUTABLES_ROOT is updated to point to compound spec", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec = path.join(tmpDir, 'spec');
fs.writeFileSync(spec, 'foo.log\n0');
const spec = path.join(tmpDir, "spec");
fs.writeFileSync(spec, "foo.log\n0");
const result = tracer_config_1.concatTracerConfigs({
'javascript': { spec: spec, env: { 'a': 'a', 'b': 'b' } },
'python': { spec: spec, env: { 'SEMMLE_COPY_EXECUTABLES_ROOT': 'foo' } },
javascript: { spec, env: { a: "a", b: "b" } },
python: { spec, env: { SEMMLE_COPY_EXECUTABLES_ROOT: "foo" } },
}, config);
t.deepEqual(result.env, {
'a': 'a',
'b': 'b',
'SEMMLE_COPY_EXECUTABLES_ROOT': path.join(tmpDir, 'compound-temp')
a: "a",
b: "b",
SEMMLE_COPY_EXECUTABLES_ROOT: path.join(tmpDir, "compound-temp"),
});
});
});
ava_1.default('concatTracerConfigs - compound environment file is created correctly', async (t) => {
ava_1.default("concatTracerConfigs - compound environment file is created correctly", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec1 = path.join(tmpDir, 'spec1');
fs.writeFileSync(spec1, 'foo.log\n2\nabc\ndef');
const spec1 = path.join(tmpDir, "spec1");
fs.writeFileSync(spec1, "foo.log\n2\nabc\ndef");
const tc1 = {
spec: spec1,
env: {
'a': 'a',
}
a: "a",
},
};
const spec2 = path.join(tmpDir, 'spec2');
fs.writeFileSync(spec2, 'foo.log\n1\nghi');
const spec2 = path.join(tmpDir, "spec2");
fs.writeFileSync(spec2, "foo.log\n1\nghi");
const tc2 = {
spec: spec2,
env: {
'foo': 'bar_baz',
}
foo: "bar_baz",
},
};
const result = tracer_config_1.concatTracerConfigs({ 'javascript': tc1, 'python': tc2 }, config);
const envPath = result.spec + '.environment';
const result = tracer_config_1.concatTracerConfigs({ javascript: tc1, python: tc2 }, config);
const envPath = `${result.spec}.environment`;
t.true(fs.existsSync(envPath));
const buffer = fs.readFileSync(envPath);
// Contents is binary data
t.deepEqual(buffer.length, 28);
t.deepEqual(buffer.readInt32LE(0), 2); // number of env vars
t.deepEqual(buffer.readInt32LE(4), 4); // length of env var definition
t.deepEqual(buffer.toString('utf8', 8, 12), 'a=a\0'); // [key]=[value]\0
t.deepEqual(buffer.toString("utf8", 8, 12), "a=a\0"); // [key]=[value]\0
t.deepEqual(buffer.readInt32LE(12), 12); // length of env var definition
t.deepEqual(buffer.toString('utf8', 16, 28), 'foo=bar_baz\0'); // [key]=[value]\0
t.deepEqual(buffer.toString("utf8", 16, 28), "foo=bar_baz\0"); // [key]=[value]\0
});
});
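The buffer assertions above pin down the layout of the compound ".environment" file: a little-endian int32 count of variables, then for each variable an int32 byte length followed by the NUL-terminated key=value string. A sketch of a writer for that layout, assuming only what the offsets in the test imply (the helper name is illustrative, not part of the action's API):
// Sketch: serialize env vars in the layout the test above asserts.
function buildEnvironmentBufferSketch(env: { [key: string]: string }): Buffer {
  const header = Buffer.alloc(4);
  header.writeInt32LE(Object.keys(env).length, 0); // number of env vars
  const parts: Buffer[] = [header];
  for (const [key, value] of Object.entries(env)) {
    const entry = Buffer.from(`${key}=${value}\0`, "utf8");
    const length = Buffer.alloc(4);
    length.writeInt32LE(entry.length, 0); // byte length of "key=value\0"
    parts.push(length, entry);
  }
  return Buffer.concat(parts);
}
// For { a: "a", foo: "bar_baz" } this gives 4 + (4 + 4) + (4 + 12) = 28 bytes,
// matching the lengths and offsets checked above.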
ava_1.default('getCombinedTracerConfig - return undefined when no languages are traced languages', async (t) => {
ava_1.default("getCombinedTracerConfig - return undefined when no languages are traced languages", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
// No traced languages
config.languages = [languages_1.Language.javascript, languages_1.Language.python];
const codeQL = codeql_1.setCodeQL({
getTracerEnv: async function () {
async getTracerEnv() {
return {
'ODASA_TRACER_CONFIGURATION': 'abc',
'foo': 'bar'
ODASA_TRACER_CONFIGURATION: "abc",
foo: "bar",
};
},
});
t.deepEqual(await tracer_config_1.getCombinedTracerConfig(config, codeQL), undefined);
});
});
ava_1.default('getCombinedTracerConfig - valid spec file', async (t) => {
ava_1.default("getCombinedTracerConfig - valid spec file", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec = path.join(tmpDir, 'spec');
fs.writeFileSync(spec, 'foo.log\n2\nabc\ndef');
const spec = path.join(tmpDir, "spec");
fs.writeFileSync(spec, "foo.log\n2\nabc\ndef");
const codeQL = codeql_1.setCodeQL({
getTracerEnv: async function () {
async getTracerEnv() {
return {
'ODASA_TRACER_CONFIGURATION': spec,
'foo': 'bar',
ODASA_TRACER_CONFIGURATION: spec,
foo: "bar",
};
},
});
const result = await tracer_config_1.getCombinedTracerConfig(config, codeQL);
const expectedEnv = {
'foo': 'bar',
'ODASA_TRACER_CONFIGURATION': result.spec,
foo: "bar",
ODASA_TRACER_CONFIGURATION: result.spec,
};
if (process.platform === 'darwin') {
expectedEnv['DYLD_INSERT_LIBRARIES'] = path.join(path.dirname(codeQL.getPath()), 'tools', 'osx64', 'libtrace.dylib');
if (process.platform === "darwin") {
expectedEnv["DYLD_INSERT_LIBRARIES"] = path.join(path.dirname(codeQL.getPath()), "tools", "osx64", "libtrace.dylib");
}
else if (process.platform !== 'win32') {
expectedEnv['LD_PRELOAD'] = path.join(path.dirname(codeQL.getPath()), 'tools', 'linux64', '${LIB}trace.so');
else if (process.platform !== "win32") {
expectedEnv["LD_PRELOAD"] = path.join(path.dirname(codeQL.getPath()), "tools", "linux64", "${LIB}trace.so");
}
t.deepEqual(result, {
spec: path.join(tmpDir, 'compound-spec'),
spec: path.join(tmpDir, "compound-spec"),
env: expectedEnv,
});
});
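The platform branch above decides how the tracer library is injected into build processes: DYLD_INSERT_LIBRARIES on macOS, LD_PRELOAD (with the loader-expanded ${LIB} token) on other non-Windows platforms, and no preload-style variable on Windows. A compact restatement of that mapping, using the same relative tool paths shown in the test; the codeqlDir parameter stands in for path.dirname(codeQL.getPath()):
import * as path from "path";
// Sketch of the injection variable chosen by the combined tracer config,
// as asserted in the test above.
function tracerInjectionEnvSketch(codeqlDir: string): { [key: string]: string } {
  if (process.platform === "darwin") {
    return { DYLD_INSERT_LIBRARIES: path.join(codeqlDir, "tools", "osx64", "libtrace.dylib") };
  }
  if (process.platform !== "win32") {
    // "${LIB}" is expanded by the dynamic loader, not by this code.
    return { LD_PRELOAD: path.join(codeqlDir, "tools", "linux64", "${LIB}trace.so") };
  }
  return {}; // no preload-style variable is added on Windows in the test above
}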

File diff suppressed because one or more lines are too long

102
lib/upload-lib.js generated
View File

@@ -23,18 +23,18 @@ const util = __importStar(require("./util"));
// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
function combineSarifFiles(sarifFiles) {
let combinedSarif = {
const combinedSarif = {
version: null,
runs: []
runs: [],
};
for (let sarifFile of sarifFiles) {
let sarifObject = JSON.parse(fs.readFileSync(sarifFile, 'utf8'));
for (const sarifFile of sarifFiles) {
const sarifObject = JSON.parse(fs.readFileSync(sarifFile, "utf8"));
// Check SARIF version
if (combinedSarif.version === null) {
combinedSarif.version = sarifObject.version;
}
else if (combinedSarif.version !== sarifObject.version) {
throw "Different SARIF versions encountered: " + combinedSarif.version + " and " + sarifObject.version;
throw `Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`;
}
combinedSarif.runs.push(...sarifObject.runs);
}
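Because the old and new lines are interleaved in this hunk, the resulting logic reads more easily as a standalone sketch: the first file's version is adopted, any later file with a different version aborts the merge, and the runs arrays are simply concatenated. The SarifFile interface below is an assumed minimal shape, not the action's actual type:
import * as fs from "fs";
interface SarifFile {
  version: string | null;
  runs: unknown[];
}
function combineSarifFilesSketch(sarifFiles: string[]): SarifFile {
  const combined: SarifFile = { version: null, runs: [] };
  for (const sarifFile of sarifFiles) {
    const sarifObject = JSON.parse(fs.readFileSync(sarifFile, "utf8")) as SarifFile;
    if (combined.version === null) {
      combined.version = sarifObject.version; // first file sets the version
    } else if (combined.version !== sarifObject.version) {
      // (the compiled code above throws a bare string here)
      throw new Error(`Different SARIF versions encountered: ${combined.version} and ${sarifObject.version}`);
    }
    combined.runs.push(...sarifObject.runs); // runs are concatenated as-is
  }
  return combined;
}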
@@ -44,9 +44,9 @@ exports.combineSarifFiles = combineSarifFiles;
// Upload the given payload.
// If the request fails then this will retry a small number of times.
async function uploadPayload(payload, repositoryNwo, githubAuth, githubUrl, mode, logger) {
logger.info('Uploading results');
logger.info("Uploading results");
// If in test mode we don't want to upload the results
const testMode = process.env['TEST_MODE'] === 'true' || false;
const testMode = process.env["TEST_MODE"] === "true" || false;
if (testMode) {
return;
}
@@ -57,15 +57,15 @@ async function uploadPayload(payload, repositoryNwo, githubAuth, githubUrl, mode
const backoffPeriods = [1, 5, 15];
const client = api.getApiClient(githubAuth, githubUrl);
for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
const reqURL = mode === 'actions'
? 'PUT /repos/:owner/:repo/code-scanning/analysis'
: 'POST /repos/:owner/:repo/code-scanning/sarifs';
const response = await client.request(reqURL, ({
const reqURL = mode === "actions"
? "PUT /repos/:owner/:repo/code-scanning/analysis"
: "POST /repos/:owner/:repo/code-scanning/sarifs";
const response = await client.request(reqURL, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
data: payload,
}));
logger.debug('response status: ' + response.status);
});
logger.debug(`response status: ${response.status}`);
const statusCode = response.status;
if (statusCode === 202) {
logger.info("Successfully uploaded results");
@@ -74,28 +74,26 @@ async function uploadPayload(payload, repositoryNwo, githubAuth, githubUrl, mode
const requestID = response.headers["x-github-request-id"];
// On any other status code that's not 5xx mark the upload as failed
if (!statusCode || statusCode < 500 || statusCode >= 600) {
throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
throw new Error(`Upload failed (${requestID}): (${statusCode}) ${JSON.stringify(response.data)}`);
}
// On a 5xx status code we may retry the request
if (attempt < backoffPeriods.length) {
// Log the failure as a warning but don't mark the action as failed yet
logger.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
') failed (' + requestID + '). Retrying in ' + backoffPeriods[attempt] +
' seconds: (' + statusCode + ') ' + JSON.stringify(response.data));
logger.warning(`Upload attempt (${attempt + 1} of ${backoffPeriods.length + 1}) failed (${requestID}). Retrying in ${backoffPeriods[attempt]} seconds: (${statusCode}) ${JSON.stringify(response.data)}`);
// Sleep for the backoff period
await new Promise(r => setTimeout(r, backoffPeriods[attempt] * 1000));
await new Promise((r) => setTimeout(r, backoffPeriods[attempt] * 1000));
continue;
}
else {
// If the upload fails with 5xx then we assume it is a temporary problem
// and not an error that the user has caused or can fix.
// We avoid marking the job as failed to avoid breaking CI workflows.
throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
throw new Error(`Upload failed (${requestID}): (${statusCode}) ${JSON.stringify(response.data)}`);
}
}
// This case shouldn't ever happen as the final iteration of the loop
// will always throw an error instead of exiting to here.
throw new Error('Upload failed');
throw new Error("Upload failed");
}
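The retry policy in this hunk is easy to lose among the string-formatting changes: up to three retries with backoff periods of 1, 5 and 15 seconds, retrying only on 5xx responses, returning on 202, and failing immediately on any other status. A condensed sketch of that control flow; sendOnce and warn are stand-ins for the client.request call and the logger, not part of the action:
async function uploadWithRetrySketch(
  sendOnce: () => Promise<{ status: number; data: unknown }>,
  warn: (msg: string) => void
): Promise<void> {
  const backoffPeriods = [1, 5, 15]; // seconds, as in the diff above
  for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
    const response = await sendOnce();
    if (response.status === 202) {
      return; // accepted
    }
    if (response.status < 500 || response.status >= 600) {
      // Anything outside 5xx is treated as a permanent failure.
      throw new Error(`Upload failed: (${response.status}) ${JSON.stringify(response.data)}`);
    }
    if (attempt < backoffPeriods.length) {
      warn(`Attempt ${attempt + 1} of ${backoffPeriods.length + 1} failed, retrying in ${backoffPeriods[attempt]} seconds`);
      await new Promise((r) => setTimeout(r, backoffPeriods[attempt] * 1000));
    } else {
      // The final attempt also failed with a 5xx.
      throw new Error(`Upload failed: (${response.status}) ${JSON.stringify(response.data)}`);
    }
  }
}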
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
@@ -107,11 +105,11 @@ async function upload(sarifPath, repositoryNwo, commitOid, ref, analysisKey, ana
}
if (fs.lstatSync(sarifPath).isDirectory()) {
fs.readdirSync(sarifPath)
.filter(f => f.endsWith(".sarif"))
.map(f => path.resolve(sarifPath, f))
.forEach(f => sarifFiles.push(f));
.filter((f) => f.endsWith(".sarif"))
.map((f) => path.resolve(sarifPath, f))
.forEach((f) => sarifFiles.push(f));
if (sarifFiles.length === 0) {
throw new Error("No SARIF files found to upload in \"" + sarifPath + "\".");
throw new Error(`No SARIF files found to upload in "${sarifPath}".`);
}
}
else {
@@ -132,28 +130,28 @@ exports.countResultsInSarif = countResultsInSarif;
// Validates that the given file path refers to a valid SARIF file.
// Throws an error if the file is invalid.
function validateSarifFileSchema(sarifFilePath, logger) {
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
const schema = require('../src/sarif_v2.1.0_schema.json');
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, "utf8"));
const schema = require("../src/sarif_v2.1.0_schema.json");
const result = new jsonschema.Validator().validate(sarif, schema);
if (!result.valid) {
// Output the more verbose error messages in groups as these may be very large.
for (const error of result.errors) {
logger.startGroup("Error details: " + error.stack);
logger.startGroup(`Error details: ${error.stack}`);
logger.info(JSON.stringify(error, null, 2));
logger.endGroup();
}
// Set the main error message to the stacks of all the errors.
// This should be of a manageable size and may even give enough to fix the error.
const sarifErrors = result.errors.map(e => "- " + e.stack);
throw new Error("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + sarifErrors.join("\n"));
const sarifErrors = result.errors.map((e) => `- ${e.stack}`);
throw new Error(`Unable to upload "${sarifFilePath}" as it is not valid SARIF:\n${sarifErrors.join("\n")}`);
}
}
exports.validateSarifFileSchema = validateSarifFileSchema;
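The validation above uses the jsonschema package's Validator, whose result exposes valid and an errors array where each entry carries a human-readable stack; those stacks are what the thrown message joins together. A small, self-contained usage sketch with a hypothetical schema (not the bundled sarif_v2.1.0_schema.json):
import { Validator } from "jsonschema";
// Hypothetical minimal schema, just to show the shape of the validation result.
const schema = {
  type: "object",
  properties: { version: { type: "string" }, runs: { type: "array" } },
  required: ["version", "runs"],
};
const result = new Validator().validate({ version: "2.1.0" }, schema);
if (!result.valid) {
  // Each error's `stack` is a short human-readable description of the failure.
  console.log(result.errors.map((e) => `- ${e.stack}`).join("\n"));
}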
// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger) {
logger.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
if (mode === 'actions') {
logger.info(`Uploading sarif files: ${JSON.stringify(sarifFiles)}`);
if (mode === "actions") {
// This check only works on actions as env vars don't persist between calls to the runner
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
@@ -167,40 +165,40 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
}
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = fingerprints.addFingerprints(sarifPayload, checkoutPath, logger);
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
let checkoutURI = file_url_1.default(checkoutPath);
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = file_url_1.default(checkoutPath);
const toolNames = util.getToolNames(sarifPayload);
let payload;
if (mode === 'actions') {
if (mode === "actions") {
payload = JSON.stringify({
"commit_oid": commitOid,
"ref": ref,
"analysis_key": analysisKey,
"analysis_name": analysisName,
"sarif": zipped_sarif,
"workflow_run_id": workflowRunID,
"checkout_uri": checkoutURI,
"environment": environment,
"started_at": process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT],
"tool_names": toolNames,
commit_oid: commitOid,
ref,
analysis_key: analysisKey,
analysis_name: analysisName,
sarif: zipped_sarif,
workflow_run_id: workflowRunID,
checkout_uri: checkoutURI,
environment,
started_at: process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT],
tool_names: toolNames,
});
}
else {
payload = JSON.stringify({
"commit_sha": commitOid,
"ref": ref,
"sarif": zipped_sarif,
"checkout_uri": checkoutURI,
"tool_name": toolNames[0],
commit_sha: commitOid,
ref,
sarif: zipped_sarif,
checkout_uri: checkoutURI,
tool_name: toolNames[0],
});
}
// Log some useful debug info about the info
const rawUploadSizeBytes = sarifPayload.length;
logger.debug("Raw upload size: " + rawUploadSizeBytes + " bytes");
logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
const zippedUploadSizeBytes = zipped_sarif.length;
logger.debug("Base64 zipped upload size: " + zippedUploadSizeBytes + " bytes");
logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
const numResultInSarif = countResultsInSarif(sarifPayload);
logger.debug("Number of results in upload: " + numResultInSarif);
logger.debug(`Number of results in upload: ${numResultInSarif}`);
// Make the upload
await uploadPayload(payload, repositoryNwo, githubAuth, githubUrl, mode, logger);
return {

File diff suppressed because one or more lines are too long

View File

@@ -15,12 +15,12 @@ const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const uploadLib = __importStar(require("./upload-lib"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default('validateSarifFileSchema - valid', t => {
const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
ava_1.default("validateSarifFileSchema - valid", (t) => {
const inputFile = `${__dirname}/../src/testdata/valid-sarif.sarif`;
t.notThrows(() => uploadLib.validateSarifFileSchema(inputFile, logging_1.getRunnerLogger(true)));
});
ava_1.default('validateSarifFileSchema - invalid', t => {
const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
ava_1.default("validateSarifFileSchema - invalid", (t) => {
const inputFile = `${__dirname}/../src/testdata/invalid-sarif.sarif`;
t.throws(() => uploadLib.validateSarifFileSchema(inputFile, logging_1.getRunnerLogger(true)));
});
//# sourceMappingURL=upload-lib.test.js.map

View File

@@ -1 +1 @@
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,uCAA4C;AAC5C,mDAA2C;AAC3C,wDAA0C;AAE1C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAC1C,MAAM,SAAS,GAAG,SAAS,GAAG,oCAAoC,CAAC;IACnE,CAAC,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,yBAAe,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACzF,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE;IAC5C,MAAM,SAAS,GAAG,SAAS,GAAG,sCAAsC,CAAC;IACrE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,yBAAe,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACtF,CAAC,CAAC,CAAC"}
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,uCAA4C;AAC5C,mDAA6C;AAC7C,wDAA0C;AAE1C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE,EAAE;IAC5C,MAAM,SAAS,GAAG,GAAG,SAAS,oCAAoC,CAAC;IACnE,CAAC,CAAC,SAAS,CAAC,GAAG,EAAE,CACf,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,yBAAe,CAAC,IAAI,CAAC,CAAC,CACpE,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE,EAAE;IAC9C,MAAM,SAAS,GAAG,GAAG,SAAS,sCAAsC,CAAC;IACrE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CACZ,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,yBAAe,CAAC,IAAI,CAAC,CAAC,CACpE,CAAC;AACJ,CAAC,CAAC,CAAC"}

View File

@@ -13,7 +13,7 @@ const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function sendSuccessStatusReport(startedAt, uploadStats) {
const statusReportBase = await util.createStatusReportBase('upload-sarif', 'success', startedAt);
const statusReportBase = await util.createStatusReportBase("upload-sarif", "success", startedAt);
const statusReport = {
...statusReportBase,
...uploadStats,
@@ -22,22 +22,22 @@ async function sendSuccessStatusReport(startedAt, uploadStats) {
}
async function run() {
const startedAt = new Date();
if (!await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'starting', startedAt), true)) {
if (!(await util.sendStatusReport(await util.createStatusReportBase("upload-sarif", "starting", startedAt), true))) {
return;
}
try {
const uploadStats = await upload_lib.upload(core.getInput('sarif_file'), repository_1.parseRepositoryNwo(util.getRequiredEnvParam('GITHUB_REPOSITORY')), await util.getCommitOid(), util.getRef(), await util.getAnalysisKey(), util.getRequiredEnvParam('GITHUB_WORKFLOW'), util.getWorkflowRunID(), core.getInput('checkout_path'), core.getInput('matrix'), core.getInput('token'), util.getRequiredEnvParam('GITHUB_SERVER_URL'), 'actions', logging_1.getActionsLogger());
const uploadStats = await upload_lib.upload(core.getInput("sarif_file"), repository_1.parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await util.getCommitOid(), util.getRef(), await util.getAnalysisKey(), util.getRequiredEnvParam("GITHUB_WORKFLOW"), util.getWorkflowRunID(), core.getInput("checkout_path"), core.getInput("matrix"), core.getInput("token"), util.getRequiredEnvParam("GITHUB_SERVER_URL"), "actions", logging_1.getActionsLogger());
await sendSuccessStatusReport(startedAt, uploadStats);
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'failure', startedAt, error.message, error.stack));
await util.sendStatusReport(await util.createStatusReportBase("upload-sarif", "failure", startedAt, error.message, error.stack));
return;
}
}
run().catch(e => {
core.setFailed("codeql/upload-sarif action failed: " + e);
run().catch((e) => {
core.setFailed(`codeql/upload-sarif action failed: ${e}`);
console.log(e);
});
//# sourceMappingURL=upload-sarif-action.js.map

View File

@@ -1 +1 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAI/B,KAAK,UAAU,uBAAuB,CAAC,SAAe,EAAE,WAA0C;IAChG,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,cAAc,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;IACjG,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAI,WAAW;KAChB,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,cAAc,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;QAChH,OAAO;KACR;IAED,IAAI;QACF,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CACzC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,EAC3B,+BAAkB,CAAC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACjE,MAAM,IAAI,CAAC,YAAY,EAAE,EACzB,IAAI,CAAC,MAAM,EAAE,EACb,MAAM,IAAI,CAAC,cAAc,EAAE,EAC3B,IAAI,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAC3C,IAAI,CAAC,gBAAgB,EAAE,EACvB,IAAI,CAAC,QAAQ,CAAC,eAAe,CAAC,EAC9B,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EACvB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,SAAS,EACT,0BAAgB,EAAE,CAAC,CAAC;QACtB,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KAEvD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAC3D,cAAc,EACd,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;QAChB,OAAO;KACR;AACH,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,qCAAqC,GAAG,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAM/B,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAC3B,MAAM,IAAI,CAAC,sBAAsB,CAAC,cAAc,EAAE,UAAU,EAAE,SAAS,CAAC,EACxE,IAAI,CACL,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CACzC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,EAC3B,+BAAkB,CAAC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACjE,MAAM,IAAI,CAAC,YAAY,EAAE,EACzB,IAAI,CAAC,MAAM,EAAE,EACb,MAAM,IAAI,CAAC,cAAc,EAAE,EAC3B,IAAI,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAC3C,IAAI,CAAC,gBAAgB,EAAE,EACvB,IAAI,CAAC,QAAQ,CAAC,eAAe,CAAC,EAC9B,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EACvB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,SAAS,EACT,0BAAgB,EAAE,CACnB,CAAC;QACF,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KACvD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,IAAI,CAAC,gBAAgB,CACzB,MAAM,IAAI,CAAC,sBAAsB,CAC/B,cAAc,EACd,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;IAChB,IAAI,CAAC,SAAS,CAAC,sCAAsC,CAAC,EAAE,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

108
lib/util.js generated
View File

@@ -24,9 +24,9 @@ exports.GITHUB_DOTCOM_URL = "https://github.com";
function getRequiredEnvParam(paramName) {
const value = process.env[paramName];
if (value === undefined || value.length === 0) {
throw new Error(paramName + ' environment variable must be set');
throw new Error(`${paramName} environment variable must be set`);
}
core.debug(paramName + '=' + value);
core.debug(`${paramName}=${value}`);
return value;
}
exports.getRequiredEnvParam = getRequiredEnvParam;
@@ -34,7 +34,7 @@ exports.getRequiredEnvParam = getRequiredEnvParam;
* Get the extra options for the codeql commands.
*/
function getExtraOptionsEnvParam() {
const varName = 'CODEQL_ACTION_EXTRA_OPTIONS';
const varName = "CODEQL_ACTION_EXTRA_OPTIONS";
const raw = process.env[varName];
if (raw === undefined || raw.length === 0) {
return {};
@@ -43,16 +43,14 @@ function getExtraOptionsEnvParam() {
return JSON.parse(raw);
}
catch (e) {
throw new Error(varName +
' environment variable is set, but does not contain valid JSON: ' +
e.message);
throw new Error(`${varName} environment variable is set, but does not contain valid JSON: ${e.message}`);
}
}
exports.getExtraOptionsEnvParam = getExtraOptionsEnvParam;
function isLocalRun() {
return !!process.env.CODEQL_LOCAL_RUN
&& process.env.CODEQL_LOCAL_RUN !== 'false'
&& process.env.CODEQL_LOCAL_RUN !== '0';
return (!!process.env.CODEQL_LOCAL_RUN &&
process.env.CODEQL_LOCAL_RUN !== "false" &&
process.env.CODEQL_LOCAL_RUN !== "0");
}
exports.isLocalRun = isLocalRun;
/**
@@ -62,9 +60,9 @@ function prepareLocalRunEnvironment() {
if (!isLocalRun()) {
return;
}
core.debug('Action is running locally.');
core.debug("Action is running locally.");
if (!process.env.GITHUB_JOB) {
core.exportVariable('GITHUB_JOB', 'UNKNOWN-JOB');
core.exportVariable("GITHUB_JOB", "UNKNOWN-JOB");
}
}
exports.prepareLocalRunEnvironment = prepareLocalRunEnvironment;
@@ -80,19 +78,23 @@ async function getCommitOid() {
// Even if this does go wrong, it's not a huge problem for the alerts to
// reported on the merge commit.
try {
let commitOid = '';
await new toolrunnner.ToolRunner('git', ['rev-parse', 'HEAD'], {
let commitOid = "";
await new toolrunnner.ToolRunner("git", ["rev-parse", "HEAD"], {
silent: true,
listeners: {
stdout: (data) => { commitOid += data.toString(); },
stderr: (data) => { process.stderr.write(data); }
}
stdout: (data) => {
commitOid += data.toString();
},
stderr: (data) => {
process.stderr.write(data);
},
},
}).exec();
return commitOid.trim();
}
catch (e) {
core.info("Failed to call git to get current commit. Continuing with data from environment: " + e);
return getRequiredEnvParam('GITHUB_SHA');
core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
return getRequiredEnvParam("GITHUB_SHA");
}
}
exports.getCommitOid = getCommitOid;
@@ -100,27 +102,27 @@ exports.getCommitOid = getCommitOid;
* Get the path of the currently executing workflow.
*/
async function getWorkflowPath() {
const repo_nwo = getRequiredEnvParam('GITHUB_REPOSITORY').split("/");
const repo_nwo = getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
const owner = repo_nwo[0];
const repo = repo_nwo[1];
const run_id = Number(getRequiredEnvParam('GITHUB_RUN_ID'));
const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID"));
const apiClient = api.getActionsApiClient();
const runsResponse = await apiClient.request('GET /repos/:owner/:repo/actions/runs/:run_id', {
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id", {
owner,
repo,
run_id
run_id,
});
const workflowUrl = runsResponse.data.workflow_url;
const workflowResponse = await apiClient.request('GET ' + workflowUrl);
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
return workflowResponse.data.path;
}
/**
* Get the workflow run ID.
*/
function getWorkflowRunID() {
const workflowRunID = parseInt(getRequiredEnvParam('GITHUB_RUN_ID'), 10);
const workflowRunID = parseInt(getRequiredEnvParam("GITHUB_RUN_ID"), 10);
if (Number.isNaN(workflowRunID)) {
throw new Error('GITHUB_RUN_ID must define a non NaN workflow run ID');
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
}
return workflowRunID;
}
@@ -133,14 +135,14 @@ exports.getWorkflowRunID = getWorkflowRunID;
* the github API, but after that the result will be cached.
*/
async function getAnalysisKey() {
const analysisKeyEnvVar = 'CODEQL_ACTION_ANALYSIS_KEY';
const analysisKeyEnvVar = "CODEQL_ACTION_ANALYSIS_KEY";
let analysisKey = process.env[analysisKeyEnvVar];
if (analysisKey !== undefined) {
return analysisKey;
}
const workflowPath = await getWorkflowPath();
const jobName = getRequiredEnvParam('GITHUB_JOB');
analysisKey = workflowPath + ':' + jobName;
const jobName = getRequiredEnvParam("GITHUB_JOB");
analysisKey = `${workflowPath}:${jobName}`;
core.exportVariable(analysisKeyEnvVar, analysisKey);
return analysisKey;
}
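So the analysis key is just the workflow file path and the job name joined with a colon, cached in CODEQL_ACTION_ANALYSIS_KEY after the first API lookup. A tiny illustration with hypothetical values (the path and job name below are made up, not taken from this repository):
const workflowPath = ".github/workflows/codeql-analysis.yml"; // hypothetical result of getWorkflowPath()
const jobName = "analyze";                                    // hypothetical GITHUB_JOB value
const analysisKey = `${workflowPath}:${jobName}`;
// => ".github/workflows/codeql-analysis.yml:analyze"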
@@ -151,14 +153,14 @@ exports.getAnalysisKey = getAnalysisKey;
function getRef() {
// Will be in the form "refs/heads/master" on a push event
// or in the form "refs/pull/N/merge" on a pull_request event
const ref = getRequiredEnvParam('GITHUB_REF');
const ref = getRequiredEnvParam("GITHUB_REF");
// For pull request refs we want to convert from the 'merge' ref
// to the 'head' ref, as that is what we want to analyse.
// There should have been some code earlier in the workflow to do
// the checkout, but we have no way of verifying that here.
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
if (pull_ref_regex.test(ref)) {
return ref.replace(pull_ref_regex, 'refs/pull/$1/head');
return ref.replace(pull_ref_regex, "refs/pull/$1/head");
}
else {
return ref;
@@ -175,33 +177,33 @@ exports.getRef = getRef;
* @param exception Exception (only supply if status is 'failure')
*/
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
const commitOid = process.env['GITHUB_SHA'] || '';
const commitOid = process.env["GITHUB_SHA"] || "";
const ref = getRef();
const workflowRunIDStr = process.env['GITHUB_RUN_ID'];
const workflowRunIDStr = process.env["GITHUB_RUN_ID"];
let workflowRunID = -1;
if (workflowRunIDStr) {
workflowRunID = parseInt(workflowRunIDStr, 10);
}
const workflowName = process.env['GITHUB_WORKFLOW'] || '';
const jobName = process.env['GITHUB_JOB'] || '';
const workflowName = process.env["GITHUB_WORKFLOW"] || "";
const jobName = process.env["GITHUB_JOB"] || "";
const analysis_key = await getAnalysisKey();
let workflowStartedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
if (workflowStartedAt === undefined) {
workflowStartedAt = actionStartedAt.toISOString();
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
}
let statusReport = {
const statusReport = {
workflow_run_id: workflowRunID,
workflow_name: workflowName,
job_name: jobName,
analysis_key: analysis_key,
analysis_key,
commit_oid: commitOid,
ref: ref,
ref,
action_name: actionName,
action_oid: "unknown",
started_at: workflowStartedAt,
action_started_at: actionStartedAt.toISOString(),
status: status
status,
};
// Add optional parameters
if (cause) {
@@ -210,10 +212,10 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
if (exception) {
statusReport.exception = exception;
}
if (status === 'success' || status === 'failure' || status === 'aborted') {
if (status === "success" || status === "failure" || status === "aborted") {
statusReport.completed_at = new Date().toISOString();
}
let matrix = core.getInput('matrix');
const matrix = core.getInput("matrix");
if (matrix) {
statusReport.matrix_vars = matrix;
}
@@ -239,13 +241,13 @@ async function sendStatusReport(statusReport, ignoreFailures) {
return true;
}
const statusReportJSON = JSON.stringify(statusReport);
core.debug('Sending status report: ' + statusReportJSON);
core.debug(`Sending status report: ${statusReportJSON}`);
const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const client = api.getActionsApiClient();
const statusResponse = await client.request('PUT /repos/:owner/:repo/code-scanning/analysis/status', {
owner: owner,
repo: repo,
const statusResponse = await client.request("PUT /repos/:owner/:repo/code-scanning/analysis/status", {
owner,
repo,
data: statusReportJSON,
});
if (!ignoreFailures) {
@@ -256,11 +258,11 @@ async function sendStatusReport(statusReport, ignoreFailures) {
// Other failure responses (or lack thereof) could be transitory and should not
// cause the action to fail.
if (statusResponse.status === 403) {
core.setFailed('The repo on which this action is running is not opted-in to CodeQL code scanning.');
core.setFailed("The repo on which this action is running is not opted-in to CodeQL code scanning.");
return false;
}
if (statusResponse.status === 404) {
core.setFailed('Not authorized to used the CodeQL code scanning feature on this repo.');
core.setFailed("Not authorized to used the CodeQL code scanning feature on this repo.");
return false;
}
}
@@ -288,11 +290,11 @@ exports.getToolNames = getToolNames;
// Creates a random temporary directory, runs the given body, and then deletes the directory.
// Mostly intended for use within tests.
async function withTmpDir(body) {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
const realSubdir = path.join(tmpDir, 'real');
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "codeql-action-"));
const realSubdir = path.join(tmpDir, "real");
fs.mkdirSync(realSubdir);
const symlinkSubdir = path.join(tmpDir, 'symlink');
fs.symlinkSync(realSubdir, symlinkSubdir, 'dir');
const symlinkSubdir = path.join(tmpDir, "symlink");
fs.symlinkSync(realSubdir, symlinkSubdir, "dir");
const result = await body(symlinkSubdir);
fs.rmdirSync(tmpDir, { recursive: true });
return result;
@@ -309,7 +311,7 @@ function getMemoryFlag(userInput) {
if (userInput) {
memoryToUseMegaBytes = Number(userInput);
if (Number.isNaN(memoryToUseMegaBytes) || memoryToUseMegaBytes <= 0) {
throw new Error("Invalid RAM setting \"" + userInput + "\", specified.");
throw new Error(`Invalid RAM setting "${userInput}", specified.`);
}
}
else {
@@ -318,7 +320,7 @@ function getMemoryFlag(userInput) {
const systemReservedMemoryMegaBytes = 256;
memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
}
return "--ram=" + Math.floor(memoryToUseMegaBytes);
return `--ram=${Math.floor(memoryToUseMegaBytes)}`;
}
exports.getMemoryFlag = getMemoryFlag;
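For reference, the default branch of getMemoryFlag reserves 256 MB for the system and hands the rest to CodeQL; an explicit ram input overrides it after a NaN/positivity check. A sketch of the default computation, matching the `--ram=${totalMem - 256}` expectation in the util.test.js diff further down:
import * as os from "os";
// Default path only (sketch, not the exported function): total memory minus a
// 256 MB system reserve, rounded down and formatted as a --ram flag.
const totalMemoryMegaBytes = Math.floor(os.totalmem() / (1024 * 1024));
const systemReservedMemoryMegaBytes = 256;
const memoryFlag = `--ram=${Math.floor(totalMemoryMegaBytes - systemReservedMemoryMegaBytes)}`;
// e.g. a runner with 7168 MB of RAM yields "--ram=6912".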
/**
@@ -371,7 +373,7 @@ exports.getThreadsFlag = getThreadsFlag;
* Get the directory where CodeQL databases should be placed.
*/
function getCodeQLDatabasesDir(tempDir) {
return path.resolve(tempDir, 'codeql_databases');
return path.resolve(tempDir, "codeql_databases");
}
exports.getCodeQLDatabasesDir = getCodeQLDatabasesDir;
/**

File diff suppressed because one or more lines are too long

55
lib/util.test.js generated
View File

@@ -17,12 +17,12 @@ const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default('getToolNames', t => {
const input = fs.readFileSync(__dirname + '/../src/testdata/tool-names.sarif', 'utf8');
ava_1.default("getToolNames", (t) => {
const input = fs.readFileSync(`${__dirname}/../src/testdata/tool-names.sarif`, "utf8");
const toolNames = util.getToolNames(input);
t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
});
ava_1.default('getMemoryFlag() should return the correct --ram flag', t => {
ava_1.default("getMemoryFlag() should return the correct --ram flag", (t) => {
const totalMem = Math.floor(os.totalmem() / (1024 * 1024));
const tests = {
"": `--ram=${totalMem - 256}`,
@@ -33,12 +33,12 @@ ava_1.default('getMemoryFlag() should return the correct --ram flag', t => {
t.deepEqual(flag, expectedFlag);
}
});
ava_1.default('getMemoryFlag() throws if the ram input is < 0 or NaN', t => {
ava_1.default("getMemoryFlag() throws if the ram input is < 0 or NaN", (t) => {
for (const input of ["-1", "hello!"]) {
t.throws(() => util.getMemoryFlag(input));
}
});
ava_1.default('getAddSnippetsFlag() should return the correct flag', t => {
ava_1.default("getAddSnippetsFlag() should return the correct flag", (t) => {
t.deepEqual(util.getAddSnippetsFlag(true), "--sarif-add-snippets");
t.deepEqual(util.getAddSnippetsFlag("true"), "--sarif-add-snippets");
t.deepEqual(util.getAddSnippetsFlag(false), "--no-sarif-add-snippets");
@@ -46,73 +46,72 @@ ava_1.default('getAddSnippetsFlag() should return the correct flag', t => {
t.deepEqual(util.getAddSnippetsFlag("false"), "--no-sarif-add-snippets");
t.deepEqual(util.getAddSnippetsFlag("foo bar"), "--no-sarif-add-snippets");
});
ava_1.default('getThreadsFlag() should return the correct --threads flag', t => {
ava_1.default("getThreadsFlag() should return the correct --threads flag", (t) => {
const numCpus = os.cpus().length;
const tests = {
"0": "--threads=0",
"1": "--threads=1",
[`${numCpus + 1}`]: `--threads=${numCpus}`,
[`${-numCpus - 1}`]: `--threads=${-numCpus}`
[`${-numCpus - 1}`]: `--threads=${-numCpus}`,
};
for (const [input, expectedFlag] of Object.entries(tests)) {
const flag = util.getThreadsFlag(input, logging_1.getRunnerLogger(true));
t.deepEqual(flag, expectedFlag);
}
});
ava_1.default('getThreadsFlag() throws if the threads input is not an integer', t => {
ava_1.default("getThreadsFlag() throws if the threads input is not an integer", (t) => {
t.throws(() => util.getThreadsFlag("hello!", logging_1.getRunnerLogger(true)));
});
ava_1.default('getRef() throws on the empty string', t => {
ava_1.default("getRef() throws on the empty string", (t) => {
process.env["GITHUB_REF"] = "";
t.throws(util.getRef);
});
ava_1.default('isLocalRun() runs correctly', t => {
ava_1.default("isLocalRun() runs correctly", (t) => {
const origLocalRun = process.env.CODEQL_LOCAL_RUN;
process.env.CODEQL_LOCAL_RUN = '';
process.env.CODEQL_LOCAL_RUN = "";
t.assert(!util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = 'false';
process.env.CODEQL_LOCAL_RUN = "false";
t.assert(!util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = '0';
process.env.CODEQL_LOCAL_RUN = "0";
t.assert(!util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = 'true';
process.env.CODEQL_LOCAL_RUN = "true";
t.assert(util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = 'hucairz';
process.env.CODEQL_LOCAL_RUN = "hucairz";
t.assert(util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = origLocalRun;
});
ava_1.default('prepareEnvironment() when a local run', t => {
ava_1.default("prepareEnvironment() when a local run", (t) => {
const origLocalRun = process.env.CODEQL_LOCAL_RUN;
process.env.CODEQL_LOCAL_RUN = 'false';
process.env.GITHUB_JOB = 'YYY';
process.env.CODEQL_LOCAL_RUN = "false";
process.env.GITHUB_JOB = "YYY";
util.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, 'YYY');
process.env.CODEQL_LOCAL_RUN = 'true';
t.deepEqual(process.env.GITHUB_JOB, "YYY");
process.env.CODEQL_LOCAL_RUN = "true";
util.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, 'YYY');
process.env.GITHUB_JOB = '';
t.deepEqual(process.env.GITHUB_JOB, "YYY");
process.env.GITHUB_JOB = "";
util.prepareLocalRunEnvironment();
// updated
t.deepEqual(process.env.GITHUB_JOB, 'UNKNOWN-JOB');
t.deepEqual(process.env.GITHUB_JOB, "UNKNOWN-JOB");
process.env.CODEQL_LOCAL_RUN = origLocalRun;
});
ava_1.default('getExtraOptionsEnvParam() succeeds on valid JSON with invalid options (for now)', t => {
ava_1.default("getExtraOptionsEnvParam() succeeds on valid JSON with invalid options (for now)", (t) => {
const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
const options = { foo: 42 };
process.env.CODEQL_ACTION_EXTRA_OPTIONS = JSON.stringify(options);
t.deepEqual(util.getExtraOptionsEnvParam(), options);
process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
});
ava_1.default('getExtraOptionsEnvParam() succeeds on valid options', t => {
ava_1.default("getExtraOptionsEnvParam() succeeds on valid options", (t) => {
const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
const options = { database: { init: ["--debug"] } };
process.env.CODEQL_ACTION_EXTRA_OPTIONS =
JSON.stringify(options);
process.env.CODEQL_ACTION_EXTRA_OPTIONS = JSON.stringify(options);
t.deepEqual(util.getExtraOptionsEnvParam(), options);
process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
});
ava_1.default('getExtraOptionsEnvParam() fails on invalid JSON', t => {
ava_1.default("getExtraOptionsEnvParam() fails on invalid JSON", (t) => {
const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
process.env.CODEQL_ACTION_EXTRA_OPTIONS = "{{invalid-json}}";
t.throws(util.getExtraOptionsEnvParam);

File diff suppressed because one or more lines are too long

View File

@@ -1,13 +1,13 @@
import test from 'ava';
import test from "ava";
import * as analysisPaths from './analysis-paths';
import {setupTests} from './testing-utils';
import * as util from './util';
import * as analysisPaths from "./analysis-paths";
import { setupTests } from "./testing-utils";
import * as util from "./util";
setupTests(test);
test("emptyPaths", async t => {
return await util.withTmpDir(async tmpDir => {
test("emptyPaths", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const config = {
languages: [],
queries: {},
@@ -16,30 +16,33 @@ test("emptyPaths", async t => {
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: '',
codeQLCmd: "",
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
t.is(process.env['LGTM_INDEX_FILTERS'], undefined);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
t.is(process.env["LGTM_INDEX_EXCLUDE"], undefined);
t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
});
});
test("nonEmptyPaths", async t => {
return await util.withTmpDir(async tmpDir => {
test("nonEmptyPaths", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const config = {
languages: [],
queries: {},
paths: ['path1', 'path2', '**/path3'],
pathsIgnore: ['path4', 'path5', 'path6/**'],
paths: ["path1", "path2", "**/path3"],
pathsIgnore: ["path4", "path5", "path6/**"],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: '',
codeQLCmd: "",
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path4\npath5');
t.is(process.env['LGTM_INDEX_FILTERS'], 'include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**');
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
t.is(process.env["LGTM_INDEX_EXCLUDE"], "path4\npath5");
t.is(
process.env["LGTM_INDEX_FILTERS"],
"include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**"
);
});
});

View File

@@ -1,8 +1,8 @@
import * as configUtils from './config-utils';
import { Logger } from './logging';
import * as configUtils from "./config-utils";
import { Logger } from "./logging";
function isInterpretedLanguage(language): boolean {
return language === 'javascript' || language === 'python';
return language === "javascript" || language === "python";
}
// Matches a string containing only characters that are legal to include in paths on windows.
@@ -11,24 +11,29 @@ export const legalWindowsPathCharactersRegex = /^[^<>:"\|?]*$/;
// Builds an environment variable suitable for LGTM_INDEX_INCLUDE or LGTM_INDEX_EXCLUDE
function buildIncludeExcludeEnvVar(paths: string[]): string {
// Ignore anything containing a *
paths = paths.filter(p => p.indexOf('*') === -1);
paths = paths.filter((p) => p.indexOf("*") === -1);
// Some characters are illegal in path names in windows
if (process.platform === 'win32') {
paths = paths.filter(p => p.match(legalWindowsPathCharactersRegex));
if (process.platform === "win32") {
paths = paths.filter((p) => p.match(legalWindowsPathCharactersRegex));
}
return paths.join('\n');
return paths.join("\n");
}
export function printPathFiltersWarning(config: configUtils.Config, logger: Logger) {
export function printPathFiltersWarning(
config: configUtils.Config,
logger: Logger
) {
// Index include/exclude/filters only work in javascript and python.
// If any other languages are detected/configured then show a warning.
if ((config.paths.length !== 0 ||
config.pathsIgnore.length !== 0) &&
!config.languages.every(isInterpretedLanguage)) {
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
if (
(config.paths.length !== 0 || config.pathsIgnore.length !== 0) &&
!config.languages.every(isInterpretedLanguage)
) {
logger.warning(
'The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python'
);
}
}
@@ -41,19 +46,21 @@ export function includeAndExcludeAnalysisPaths(config: configUtils.Config) {
// traverse the entire file tree to determine which files are matched.
// Any paths containing "*" are not included in these.
if (config.paths.length !== 0) {
process.env['LGTM_INDEX_INCLUDE'] = buildIncludeExcludeEnvVar(config.paths);
process.env["LGTM_INDEX_INCLUDE"] = buildIncludeExcludeEnvVar(config.paths);
}
if (config.pathsIgnore.length !== 0) {
process.env['LGTM_INDEX_EXCLUDE'] = buildIncludeExcludeEnvVar(config.pathsIgnore);
process.env["LGTM_INDEX_EXCLUDE"] = buildIncludeExcludeEnvVar(
config.pathsIgnore
);
}
// The 'LGTM_INDEX_FILTERS' environment variable controls which files are
// extracted or ignored. It does not control which directories are traversed.
// This does understand the glob and double-glob syntax.
const filters: string[] = [];
filters.push(...config.paths.map(p => 'include:' + p));
filters.push(...config.pathsIgnore.map(p => 'exclude:' + p));
filters.push(...config.paths.map((p) => `include:${p}`));
filters.push(...config.pathsIgnore.map((p) => `exclude:${p}`));
if (filters.length !== 0) {
process.env['LGTM_INDEX_FILTERS'] = filters.join('\n');
process.env["LGTM_INDEX_FILTERS"] = filters.join("\n");
}
}

View File

@@ -1,20 +1,31 @@
import * as core from '@actions/core';
import * as core from "@actions/core";
import { AnalysisStatusReport, runAnalyze } from './analyze';
import { getConfig } from './config-utils';
import { getActionsLogger } from './logging';
import { parseRepositoryNwo } from './repository';
import * as util from './util';
import { AnalysisStatusReport, runAnalyze } from "./analyze";
import { getConfig } from "./config-utils";
import { getActionsLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import * as util from "./util";
interface FinishStatusReport extends util.StatusReportBase, AnalysisStatusReport {}
interface FinishStatusReport
extends util.StatusReportBase,
AnalysisStatusReport {}
async function sendStatusReport(
startedAt: Date,
stats: AnalysisStatusReport | undefined,
error?: Error) {
const status = stats?.analyze_failure_language !== undefined || error !== undefined ? 'failure' : 'success';
const statusReportBase = await util.createStatusReportBase('finish', status, startedAt, error?.message, error?.stack);
error?: Error
) {
const status =
stats?.analyze_failure_language !== undefined || error !== undefined
? "failure"
: "success";
const statusReportBase = await util.createStatusReportBase(
"finish",
status,
startedAt,
error?.message,
error?.stack
);
const statusReport: FinishStatusReport = {
...statusReportBase,
...(stats || {}),
@@ -27,34 +38,44 @@ async function run() {
let stats: AnalysisStatusReport | undefined = undefined;
try {
util.prepareLocalRunEnvironment();
if (!await util.sendStatusReport(await util.createStatusReportBase('finish', 'starting', startedAt), true)) {
if (
!(await util.sendStatusReport(
await util.createStatusReportBase("finish", "starting", startedAt),
true
))
) {
return;
}
const logger = getActionsLogger();
const config = await getConfig(util.getRequiredEnvParam('RUNNER_TEMP'), logger);
const config = await getConfig(
util.getRequiredEnvParam("RUNNER_TEMP"),
logger
);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
throw new Error(
"Config file could not be found at expected location. Has the 'init' action been called?"
);
}
stats = await runAnalyze(
parseRepositoryNwo(util.getRequiredEnvParam('GITHUB_REPOSITORY')),
parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")),
await util.getCommitOid(),
util.getRef(),
await util.getAnalysisKey(),
util.getRequiredEnvParam('GITHUB_WORKFLOW'),
util.getRequiredEnvParam("GITHUB_WORKFLOW"),
util.getWorkflowRunID(),
core.getInput('checkout_path'),
core.getInput('matrix'),
core.getInput('token'),
util.getRequiredEnvParam('GITHUB_SERVER_URL'),
core.getInput('upload') === 'true',
'actions',
core.getInput('output'),
util.getMemoryFlag(core.getInput('ram')),
util.getAddSnippetsFlag(core.getInput('add-snippets')),
util.getThreadsFlag(core.getInput('threads'), logger),
core.getInput("checkout_path"),
core.getInput("matrix"),
core.getInput("token"),
util.getRequiredEnvParam("GITHUB_SERVER_URL"),
core.getInput("upload") === "true",
"actions",
core.getInput("output"),
util.getMemoryFlag(core.getInput("ram")),
util.getAddSnippetsFlag(core.getInput("add-snippets")),
util.getThreadsFlag(core.getInput("threads"), logger),
config,
logger);
logger
);
} catch (error) {
core.setFailed(error.message);
console.log(error);
@@ -65,7 +86,7 @@ async function run() {
await sendStatusReport(startedAt, stats);
}
run().catch(e => {
core.setFailed("analyze action failed: " + e);
run().catch((e) => {
core.setFailed(`analyze action failed: ${e}`);
console.log(e);
});

View File

@@ -1,15 +1,15 @@
import * as fs from 'fs';
import * as path from 'path';
import * as fs from "fs";
import * as path from "path";
import * as analysisPaths from './analysis-paths';
import { getCodeQL } from './codeql';
import * as configUtils from './config-utils';
import { isScannedLanguage } from './languages';
import { Logger } from './logging';
import { RepositoryNwo } from './repository';
import * as sharedEnv from './shared-environment';
import * as upload_lib from './upload-lib';
import * as util from './util';
import * as analysisPaths from "./analysis-paths";
import { getCodeQL } from "./codeql";
import * as configUtils from "./config-utils";
import { isScannedLanguage } from "./languages";
import { Logger } from "./logging";
import { RepositoryNwo } from "./repository";
import * as sharedEnv from "./shared-environment";
import * as upload_lib from "./upload-lib";
import * as util from "./util";
export interface QueriesStatusReport {
// Time taken in ms to analyze builtin queries for cpp (or undefined if this language was not analyzed)
@@ -40,12 +40,14 @@ export interface QueriesStatusReport {
analyze_failure_language?: string;
}
export interface AnalysisStatusReport extends upload_lib.UploadStatusReport, QueriesStatusReport {}
export interface AnalysisStatusReport
extends upload_lib.UploadStatusReport,
QueriesStatusReport {}
async function createdDBForScannedLanguages(
config: configUtils.Config,
logger: Logger) {
logger: Logger
) {
// Insert the LGTM_INDEX_X env vars at this point so they are set when
// we extract any scanned languages.
analysisPaths.includeAndExcludeAnalysisPaths(config);
@@ -53,8 +55,11 @@ async function createdDBForScannedLanguages(
const codeql = getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
if (isScannedLanguage(language)) {
logger.startGroup('Extracting ' + language);
await codeql.extractScannedLanguage(util.getCodeQLDatabasePath(config.tempDir, language), language);
logger.startGroup(`Extracting ${language}`);
await codeql.extractScannedLanguage(
util.getCodeQLDatabasePath(config.tempDir, language),
language
);
logger.endGroup();
}
}
@@ -62,14 +67,16 @@ async function createdDBForScannedLanguages(
async function finalizeDatabaseCreation(
config: configUtils.Config,
logger: Logger) {
logger: Logger
) {
await createdDBForScannedLanguages(config, logger);
const codeql = getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
logger.startGroup('Finalizing ' + language);
await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config.tempDir, language));
logger.startGroup(`Finalizing ${language}`);
await codeql.finalizeDatabase(
util.getCodeQLDatabasePath(config.tempDir, language)
);
logger.endGroup();
}
}
@@ -81,33 +88,45 @@ async function runQueries(
addSnippetsFlag: string,
threadsFlag: string,
config: configUtils.Config,
logger: Logger): Promise<QueriesStatusReport> {
logger: Logger
): Promise<QueriesStatusReport> {
const codeql = getCodeQL(config.codeQLCmd);
for (let language of config.languages) {
logger.startGroup('Analyzing ' + language);
for (const language of config.languages) {
logger.startGroup(`Analyzing ${language}`);
const queries = config.queries[language] || [];
if (queries.length === 0) {
throw new Error('Unable to analyse ' + language + ' as no queries were selected for this language');
throw new Error(
`Unable to analyse ${language} as no queries were selected for this language`
);
}
try {
const databasePath = util.getCodeQLDatabasePath(config.tempDir, language);
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuite = databasePath + '-queries.qls';
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
const querySuite = `${databasePath}-queries.qls`;
const querySuiteContents = queries.map((q) => `- query: ${q}`).join("\n");
fs.writeFileSync(querySuite, querySuiteContents);
logger.debug('Query suite file for ' + language + '...\n' + querySuiteContents);
logger.debug(
`Query suite file for ${language}...\n${querySuiteContents}`
);
const sarifFile = path.join(sarifFolder, language + '.sarif');
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
await codeql.databaseAnalyze(databasePath, sarifFile, querySuite, memoryFlag, addSnippetsFlag, threadsFlag);
await codeql.databaseAnalyze(
databasePath,
sarifFile,
querySuite,
memoryFlag,
addSnippetsFlag,
threadsFlag
);
logger.debug('SARIF results for database ' + language + ' created at "' + sarifFile + '"');
logger.debug(
`SARIF results for database ${language} created at "${sarifFile}"`
);
logger.endGroup();
} catch (e) {
// For now the fields about query performance are not populated
return {
@@ -137,21 +156,28 @@ export async function runAnalyze(
addSnippetsFlag: string,
threadsFlag: string,
config: configUtils.Config,
logger: Logger): Promise<AnalysisStatusReport> {
logger: Logger
): Promise<AnalysisStatusReport> {
// Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
fs.mkdirSync(outputDir, { recursive: true });
logger.info('Finalizing database creation');
logger.info("Finalizing database creation");
await finalizeDatabaseCreation(config, logger);
logger.info('Analyzing database');
const queriesStats = await runQueries(outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger);
logger.info("Analyzing database");
const queriesStats = await runQueries(
outputDir,
memoryFlag,
addSnippetsFlag,
threadsFlag,
config,
logger
);
if (!doUpload) {
logger.info('Not uploading results');
logger.info("Not uploading results");
return { ...queriesStats };
}
@@ -168,7 +194,8 @@ export async function runAnalyze(
githubAuth,
githubUrl,
mode,
logger);
logger
);
return { ...queriesStats, ...uploadStats };
}
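As the comment in runQueries explains, the selected queries are written to a <database>-queries.qls file and handed to the CLI by path, avoiding command-line length limits (notably on Windows). A small illustration of what that generated suite file contains, using two invented query paths:
// Invented query paths; the real ones come from config.queries[language].
const queries = ["custom-queries/FindFoo.ql", "custom-queries/FindBar.ql"];
const querySuiteContents = queries.map((q) => `- query: ${q}`).join("\n");
// The file written by fs.writeFileSync would therefore contain:
//   - query: custom-queries/FindFoo.ql
//   - query: custom-queries/FindBar.ql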


@@ -1,21 +1,24 @@
import * as core from "@actions/core";
import * as github from "@actions/github";
import consoleLogLevel from "console-log-level";
import * as path from 'path';
import * as path from "path";
import { getRequiredEnvParam, isLocalRun } from "./util";
export const getApiClient = function(githubAuth: string, githubUrl: string, allowLocalRun = false) {
export const getApiClient = function (
githubAuth: string,
githubUrl: string,
allowLocalRun = false
) {
if (isLocalRun() && !allowLocalRun) {
throw new Error('Invalid API call in local run');
throw new Error("Invalid API call in local run");
}
return new github.GitHub(
{
auth: githubAuth,
baseUrl: getApiUrl(githubUrl),
userAgent: "CodeQL Action",
log: consoleLogLevel({ level: "debug" })
});
return new github.GitHub({
auth: githubAuth,
baseUrl: getApiUrl(githubUrl),
userAgent: "CodeQL Action",
log: consoleLogLevel({ level: "debug" }),
});
};
function getApiUrl(githubUrl: string): string {
@@ -23,12 +26,12 @@ function getApiUrl(githubUrl: string): string {
// If we detect this is trying to be to github.com
// then return with a fixed canonical URL.
if (url.hostname === 'github.com' || url.hostname === 'api.github.com') {
return 'https://api.github.com';
if (url.hostname === "github.com" || url.hostname === "api.github.com") {
return "https://api.github.com";
}
// Add the /api/v3 API prefix
url.pathname = path.join(url.pathname, 'api', 'v3');
url.pathname = path.join(url.pathname, "api", "v3");
return url.toString();
}
@@ -37,7 +40,8 @@ function getApiUrl(githubUrl: string): string {
// and called only from the action entrypoints.
export function getActionsApiClient(allowLocalRun = false) {
return getApiClient(
core.getInput('token'),
getRequiredEnvParam('GITHUB_SERVER_URL'),
allowLocalRun);
core.getInput("token"),
getRequiredEnvParam("GITHUB_SERVER_URL"),
allowLocalRun
);
}
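To make the URL handling above concrete, here is a self-contained sketch of the mapping getApiUrl performs. It assumes the lines elided by the hunk construct a URL object from githubUrl, and the enterprise hostname is invented:
import * as path from "path";
function sketchGetApiUrl(githubUrl: string): string {
  const url = new URL(githubUrl); // assumed: the elided lines do the equivalent
  // github.com and api.github.com both map to the canonical dotcom API URL.
  if (url.hostname === "github.com" || url.hostname === "api.github.com") {
    return "https://api.github.com";
  }
  // GitHub Enterprise Server exposes its REST API under /api/v3.
  url.pathname = path.join(url.pathname, "api", "v3");
  return url.toString();
}
// sketchGetApiUrl("https://ghe.example.com") -> "https://ghe.example.com/api/v3"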


@@ -1,10 +1,10 @@
import * as core from '@actions/core';
import * as core from "@actions/core";
import { determineAutobuildLanguage, runAutobuild } from './autobuild';
import * as config_utils from './config-utils';
import { Language } from './languages';
import { getActionsLogger } from './logging';
import * as util from './util';
import { determineAutobuildLanguage, runAutobuild } from "./autobuild";
import * as config_utils from "./config-utils";
import { Language } from "./languages";
import { getActionsLogger } from "./logging";
import * as util from "./util";
interface AutobuildStatusReport extends util.StatusReportBase {
// Comma-separated set of languages being autobuilt
@@ -17,18 +17,22 @@ async function sendCompletedStatusReport(
startedAt: Date,
allLanguages: string[],
failingLanguage?: string,
cause?: Error) {
const status = failingLanguage !== undefined || cause !== undefined ? 'failure' : 'success';
cause?: Error
) {
const status =
failingLanguage !== undefined || cause !== undefined
? "failure"
: "success";
const statusReportBase = await util.createStatusReportBase(
'autobuild',
"autobuild",
status,
startedAt,
cause?.message,
cause?.stack);
cause?.stack
);
const statusReport: AutobuildStatusReport = {
...statusReportBase,
autobuild_languages: allLanguages.join(','),
autobuild_languages: allLanguages.join(","),
autobuild_failure: failingLanguage,
};
await util.sendStatusReport(statusReport);
@@ -40,30 +44,46 @@ async function run() {
let language: Language | undefined = undefined;
try {
util.prepareLocalRunEnvironment();
if (!await util.sendStatusReport(await util.createStatusReportBase('autobuild', 'starting', startedAt), true)) {
if (
!(await util.sendStatusReport(
await util.createStatusReportBase("autobuild", "starting", startedAt),
true
))
) {
return;
}
const config = await config_utils.getConfig(util.getRequiredEnvParam('RUNNER_TEMP'), logger);
const config = await config_utils.getConfig(
util.getRequiredEnvParam("RUNNER_TEMP"),
logger
);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
throw new Error(
"Config file could not be found at expected location. Has the 'init' action been called?"
);
}
language = determineAutobuildLanguage(config, logger);
if (language !== undefined) {
await runAutobuild(language, config, logger);
}
} catch (error) {
core.setFailed("We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. " + error.message);
core.setFailed(
`We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. ${error.message}`
);
console.log(error);
await sendCompletedStatusReport(startedAt, language ? [language] : [], language, error);
await sendCompletedStatusReport(
startedAt,
language ? [language] : [],
language,
error
);
return;
}
await sendCompletedStatusReport(startedAt, language ? [language] : []);
}
run().catch(e => {
core.setFailed("autobuild action failed. " + e);
run().catch((e) => {
core.setFailed(`autobuild action failed. ${e}`);
console.log(e);
});


@@ -1,13 +1,12 @@
import { getCodeQL } from './codeql';
import * as config_utils from './config-utils';
import { isTracedLanguage, Language } from './languages';
import { Logger } from './logging';
import { getCodeQL } from "./codeql";
import * as config_utils from "./config-utils";
import { Language, isTracedLanguage } from "./languages";
import { Logger } from "./logging";
export function determineAutobuildLanguage(
config: config_utils.Config,
logger: Logger
): Language | undefined {
// Attempt to find a language to autobuild
// We want pick the dominant language in the repo from the ones we're able to build
// The languages are sorted in order specified by user or by lines of code if we got
@@ -16,14 +15,20 @@ export function determineAutobuildLanguage(
const language = autobuildLanguages[0];
if (!language) {
logger.info("None of the languages in this project require extra build steps");
logger.info(
"None of the languages in this project require extra build steps"
);
return undefined;
}
logger.debug(`Detected dominant traced language: ${language}`);
if (autobuildLanguages.length > 1) {
logger.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages.slice(1).join(' and ')}, you must replace this call with custom build steps.`);
logger.warning(
`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages
.slice(1)
.join(" and ")}, you must replace this call with custom build steps.`
);
}
return language;
@@ -32,8 +37,8 @@ export function determineAutobuildLanguage(
export async function runAutobuild(
language: Language,
config: config_utils.Config,
logger: Logger) {
logger: Logger
) {
logger.startGroup(`Attempting to automatically build ${language} code`);
const codeQL = getCodeQL(config.codeQLCmd);
await codeQL.runAutobuild(language);
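A hedged illustration of the selection logic in determineAutobuildLanguage: the candidate languages are already ordered by popularity, so only the first one is autobuilt and the warning lists the rest. The repository contents here are invented:
// Invented example; the real list is derived from config.languages.
const autobuildLanguages = ["java", "cpp"];
const dominant = autobuildLanguages[0]; // "java" gets the automatic build
const needCustomSteps = autobuildLanguages.slice(1).join(" and "); // "cpp" would need custom build steps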


@@ -1,61 +1,66 @@
import * as toolcache from '@actions/tool-cache';
import test from 'ava';
import nock from 'nock';
import * as path from 'path';
import * as toolcache from "@actions/tool-cache";
import test from "ava";
import nock from "nock";
import * as path from "path";
import * as codeql from './codeql';
import { getRunnerLogger } from './logging';
import {setupTests} from './testing-utils';
import * as util from './util';
import * as codeql from "./codeql";
import { getRunnerLogger } from "./logging";
import { setupTests } from "./testing-utils";
import * as util from "./util";
setupTests(test);
test('download codeql bundle cache', async t => {
await util.withTmpDir(async tmpDir => {
const versions = ['20200601', '20200610'];
test("download codeql bundle cache", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const versions = ["20200601", "20200610"];
for (let i = 0; i < versions.length; i++) {
const version = versions[i];
nock('https://example.com')
nock("https://example.com")
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
.replyWithFile(
200,
path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`)
);
await codeql.setupCodeQL(
`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`,
'token',
'https://github.example.com',
"token",
"https://github.example.com",
tmpDir,
tmpDir,
'runner',
getRunnerLogger(true));
"runner",
getRunnerLogger(true)
);
t.assert(toolcache.find('CodeQL', `0.0.0-${version}`));
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
}
const cachedVersions = toolcache.findAllVersions('CodeQL');
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
});
test('parse codeql bundle url version', t => {
test("parse codeql bundle url version", (t) => {
const tests = {
'20200601': '0.0.0-20200601',
'20200601.0': '0.0.0-20200601.0',
'20200601.0.0': '20200601.0.0',
'1.2.3': '1.2.3',
'1.2.3-alpha': '1.2.3-alpha',
'1.2.3-beta.1': '1.2.3-beta.1',
"20200601": "0.0.0-20200601",
"20200601.0": "0.0.0-20200601.0",
"20200601.0.0": "20200601.0.0",
"1.2.3": "1.2.3",
"1.2.3-alpha": "1.2.3-alpha",
"1.2.3-beta.1": "1.2.3-beta.1",
};
for (const [version, expectedVersion] of Object.entries(tests)) {
const url = `https://github.com/.../codeql-bundle-${version}/...`;
try {
const parsedVersion = codeql.getCodeQLURLVersion(url, getRunnerLogger(true));
const parsedVersion = codeql.getCodeQLURLVersion(
url,
getRunnerLogger(true)
);
t.deepEqual(parsedVersion, expectedVersion);
} catch (e) {
t.fail(e.message);
@@ -63,34 +68,43 @@ test('parse codeql bundle url version', t => {
}
});
test('getExtraOptions works for explicit paths', t => {
t.deepEqual(codeql.getExtraOptions({}, ['foo'], []), []);
test("getExtraOptions works for explicit paths", (t) => {
t.deepEqual(codeql.getExtraOptions({}, ["foo"], []), []);
t.deepEqual(codeql.getExtraOptions({foo: [42]}, ['foo'], []), ['42']);
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ["foo"], []), ["42"]);
t.deepEqual(codeql.getExtraOptions({foo: {bar: [42]}}, ['foo', 'bar'], []), ['42']);
t.deepEqual(
codeql.getExtraOptions({ foo: { bar: [42] } }, ["foo", "bar"], []),
["42"]
);
});
test('getExtraOptions works for wildcards', t => {
t.deepEqual(codeql.getExtraOptions({'*': [42]}, ['foo'], []), ['42']);
test("getExtraOptions works for wildcards", (t) => {
t.deepEqual(codeql.getExtraOptions({ "*": [42] }, ["foo"], []), ["42"]);
});
test('getExtraOptions works for wildcards and explicit paths', t => {
let o1 = {'*': [42], foo: [87]};
t.deepEqual(codeql.getExtraOptions(o1, ['foo'], []), ['42', '87']);
test("getExtraOptions works for wildcards and explicit paths", (t) => {
const o1 = { "*": [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o1, ["foo"], []), ["42", "87"]);
let o2 = {'*': [42], foo: [87]};
t.deepEqual(codeql.getExtraOptions(o2, ['foo', 'bar'], []), ['42']);
const o2 = { "*": [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o2, ["foo", "bar"], []), ["42"]);
let o3 = {'*': [42], foo: { '*': [87], bar: [99]}};
let p = ['foo', 'bar'];
t.deepEqual(codeql.getExtraOptions(o3, p, []), ['42', '87', '99']);
const o3 = { "*": [42], foo: { "*": [87], bar: [99] } };
const p = ["foo", "bar"];
t.deepEqual(codeql.getExtraOptions(o3, p, []), ["42", "87", "99"]);
});
test('getExtraOptions throws for bad content', t => {
t.throws(() => codeql.getExtraOptions({'*': 42}, ['foo'], []));
test("getExtraOptions throws for bad content", (t) => {
t.throws(() => codeql.getExtraOptions({ "*": 42 }, ["foo"], []));
t.throws(() => codeql.getExtraOptions({foo: 87}, ['foo'], []));
t.throws(() => codeql.getExtraOptions({ foo: 87 }, ["foo"], []));
t.throws(() => codeql.getExtraOptions({'*': [42], foo: { '*': 87, bar: [99]}}, ['foo', 'bar'], []));
t.throws(() =>
codeql.getExtraOptions(
{ "*": [42], foo: { "*": 87, bar: [99] } },
["foo", "bar"],
[]
)
);
});


@@ -1,38 +1,38 @@
import * as toolrunnner from '@actions/exec/lib/toolrunner';
import * as http from '@actions/http-client';
import { IHeaders } from '@actions/http-client/interfaces';
import * as toolcache from '@actions/tool-cache';
import * as fs from 'fs';
import * as path from 'path';
import * as semver from 'semver';
import * as stream from 'stream';
import * as globalutil from 'util';
import uuidV4 from 'uuid/v4';
import * as toolrunnner from "@actions/exec/lib/toolrunner";
import * as http from "@actions/http-client";
import { IHeaders } from "@actions/http-client/interfaces";
import * as toolcache from "@actions/tool-cache";
import * as fs from "fs";
import * as path from "path";
import * as semver from "semver";
import * as stream from "stream";
import * as globalutil from "util";
import uuidV4 from "uuid/v4";
import * as api from './api-client';
import * as defaults from './defaults.json'; // Referenced from codeql-action-sync-tool!
import { Language } from './languages';
import { Logger } from './logging';
import * as util from './util';
import * as api from "./api-client";
import * as defaults from "./defaults.json"; // Referenced from codeql-action-sync-tool!
import { Language } from "./languages";
import { Logger } from "./logging";
import * as util from "./util";
type Options = (string|number|boolean)[];
type Options = Array<string | number | boolean>;
/**
* Extra command line options for the codeql commands.
*/
interface ExtraOptions {
'*'?: Options;
"*"?: Options;
database?: {
'*'?: Options,
init?: Options,
'trace-command'?: Options,
analyze?: Options,
finalize?: Options
"*"?: Options;
init?: Options;
"trace-command"?: Options;
analyze?: Options;
finalize?: Options;
};
resolve?: {
'*'?: Options,
extractor?: Options,
queries?: Options
"*"?: Options;
extractor?: Options;
queries?: Options;
};
}
@@ -53,7 +53,11 @@ export interface CodeQL {
/**
* Run 'codeql database init'.
*/
databaseInit(databasePath: string, language: Language, sourceRoot: string): Promise<void>;
databaseInit(
databasePath: string,
language: Language,
sourceRoot: string
): Promise<void>;
/**
* Runs the autobuilder for the given language.
*/
@@ -70,7 +74,10 @@ export interface CodeQL {
/**
* Run 'codeql resolve queries'.
*/
resolveQueries(queries: string[], extraSearchPath: string | undefined): Promise<ResolveQueriesOutput>;
resolveQueries(
queries: string[],
extraSearchPath: string | undefined
): Promise<ResolveQueriesOutput>;
/**
* Run 'codeql database analyze'.
*/
@@ -80,20 +87,21 @@ export interface CodeQL {
querySuite: string,
memoryFlag: string,
addSnippetsFlag: string,
threadsFlag: string): Promise<void>;
threadsFlag: string
): Promise<void>;
}
export interface ResolveQueriesOutput {
byLanguage: {
[language: string]: {
[queryPath: string]: {}
}
[queryPath: string]: {};
};
};
noDeclaredLanguage: {
[queryPath: string]: {}
[queryPath: string]: {};
};
multipleDeclaredLanguages: {
[queryPath: string]: {}
[queryPath: string]: {};
};
}
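For readers who have not seen the CLI output, a hedged sketch of a value matching ResolveQueriesOutput, roughly the shape produced by `codeql resolve queries --format=bylanguage` (the query path is invented):
const exampleOutput: ResolveQueriesOutput = {
  byLanguage: {
    cpp: { "ql/cpp/example/UseAfterFree.ql": {} }, // invented query path
  },
  noDeclaredLanguage: {},
  multipleDeclaredLanguages: {},
};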
@@ -108,7 +116,7 @@ const CODEQL_BUNDLE_NAME = "codeql-bundle.tar.gz";
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
function getCodeQLActionRepository(mode: util.Mode): string {
if (mode !== 'actions') {
if (mode !== "actions") {
return CODEQL_DEFAULT_ACTION_REPOSITORY;
}
@@ -120,19 +128,22 @@ function getCodeQLActionRepository(mode: util.Mode): string {
const relativeScriptPath = path.relative(actionsDirectory, __filename);
// This handles the case where the Action does not come from an Action repository,
// e.g. our integration tests which use the Action code from the current checkout.
if (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath)) {
if (
relativeScriptPath.startsWith("..") ||
path.isAbsolute(relativeScriptPath)
) {
return CODEQL_DEFAULT_ACTION_REPOSITORY;
}
const relativeScriptPathParts = relativeScriptPath.split(path.sep);
return relativeScriptPathParts[0] + "/" + relativeScriptPathParts[1];
return `${relativeScriptPathParts[0]}/${relativeScriptPathParts[1]}`;
}
async function getCodeQLBundleDownloadURL(
githubAuth: string,
githubUrl: string,
mode: util.Mode,
logger: Logger): Promise<string> {
logger: Logger
): Promise<string> {
const codeQLActionRepository = getCodeQLActionRepository(mode);
const potentialDownloadSources = [
// This GitHub instance, and this Action.
@@ -144,28 +155,39 @@ async function getCodeQLBundleDownloadURL(
];
// We now filter out any duplicates.
// Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
const uniqueDownloadSources = potentialDownloadSources.filter((url, index, self) => index === self.indexOf(url));
for (let downloadSource of uniqueDownloadSources) {
let [apiURL, repository] = downloadSource;
const uniqueDownloadSources = potentialDownloadSources.filter(
(url, index, self) => index === self.indexOf(url)
);
for (const downloadSource of uniqueDownloadSources) {
const [apiURL, repository] = downloadSource;
// If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
if (apiURL === util.GITHUB_DOTCOM_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
if (
apiURL === util.GITHUB_DOTCOM_URL &&
repository === CODEQL_DEFAULT_ACTION_REPOSITORY
) {
break;
}
let [repositoryOwner, repositoryName] = repository.split("/");
const [repositoryOwner, repositoryName] = repository.split("/");
try {
const release = await api.getApiClient(githubAuth, githubUrl).repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: CODEQL_BUNDLE_VERSION
});
for (let asset of release.data.assets) {
const release = await api
.getApiClient(githubAuth, githubUrl)
.repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: CODEQL_BUNDLE_VERSION,
});
for (const asset of release.data.assets) {
if (asset.name === CODEQL_BUNDLE_NAME) {
logger.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
logger.info(
`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`
);
return asset.url;
}
}
} catch (e) {
logger.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
logger.info(
`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`
);
}
}
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${CODEQL_BUNDLE_NAME}`;
@@ -177,13 +199,15 @@ async function toolcacheDownloadTool(
url: string,
headers: IHeaders | undefined,
tempDir: string,
logger: Logger): Promise<string> {
const client = new http.HttpClient('CodeQL Action');
logger: Logger
): Promise<string> {
const client = new http.HttpClient("CodeQL Action");
const dest = path.join(tempDir, uuidV4());
const response: http.HttpClientResponse = await client.get(url, headers);
if (response.message.statusCode !== 200) {
logger.info(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
logger.info(
`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`
);
throw new Error(`Unexpected HTTP response: ${response.message.statusCode}`);
}
const pipeline = globalutil.promisify(stream.pipeline);
@@ -199,53 +223,71 @@ export async function setupCodeQL(
tempDir: string,
toolsDir: string,
mode: util.Mode,
logger: Logger): Promise<CodeQL> {
logger: Logger
): Promise<CodeQL> {
// Setting these two env vars makes the toolcache code safe to use outside,
// of actions but this is obviously not a great thing we're doing and it would
// be better to write our own implementation to use outside of actions.
process.env['RUNNER_TEMP'] = tempDir;
process.env['RUNNER_TOOL_CACHE'] = toolsDir;
process.env["RUNNER_TEMP"] = tempDir;
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
try {
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`, logger);
const codeqlURLVersion = getCodeQLURLVersion(
codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`,
logger
);
let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
let codeqlFolder = toolcache.find("CodeQL", codeqlURLVersion);
if (codeqlFolder) {
logger.debug(`CodeQL found in cache ${codeqlFolder}`);
} else {
if (!codeqlURL) {
codeqlURL = await getCodeQLBundleDownloadURL(githubAuth, githubUrl, mode, logger);
codeqlURL = await getCodeQLBundleDownloadURL(
githubAuth,
githubUrl,
mode,
logger
);
}
const headers: IHeaders = {accept: 'application/octet-stream'};
const headers: IHeaders = { accept: "application/octet-stream" };
// We only want to provide an authorization header if we are downloading
// from the same GitHub instance the Action is running on.
// This avoids leaking Enterprise tokens to dotcom.
if (codeqlURL.startsWith(githubUrl + "/")) {
logger.debug('Downloading CodeQL bundle with token.');
if (codeqlURL.startsWith(`${githubUrl}/`)) {
logger.debug("Downloading CodeQL bundle with token.");
headers.authorization = `token ${githubAuth}`;
} else {
logger.debug('Downloading CodeQL bundle without token.');
logger.debug("Downloading CodeQL bundle without token.");
}
logger.info(`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`);
let codeqlPath = await toolcacheDownloadTool(codeqlURL, headers, tempDir, logger);
logger.info(
`Downloading CodeQL tools from ${codeqlURL}. This may take a while.`
);
const codeqlPath = await toolcacheDownloadTool(
codeqlURL,
headers,
tempDir,
logger
);
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
codeqlFolder = await toolcache.cacheDir(
codeqlExtracted,
"CodeQL",
codeqlURLVersion
);
}
let codeqlCmd = path.join(codeqlFolder, 'codeql', 'codeql');
if (process.platform === 'win32') {
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
if (process.platform === "win32") {
codeqlCmd += ".exe";
} else if (process.platform !== 'linux' && process.platform !== 'darwin') {
throw new Error("Unsupported platform: " + process.platform);
} else if (process.platform !== "linux" && process.platform !== "darwin") {
throw new Error(`Unsupported platform: ${process.platform}`);
}
cachedCodeQL = getCodeQLForCmd(codeqlCmd);
return cachedCodeQL;
} catch (e) {
logger.error(e);
throw new Error("Unable to download and extract CodeQL CLI");
@@ -253,22 +295,27 @@ export async function setupCodeQL(
}
export function getCodeQLURLVersion(url: string, logger: Logger): string {
const match = url.match(/\/codeql-bundle-(.*)\//);
if (match === null || match.length < 2) {
throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
throw new Error(
`Malformed tools url: ${url}. Version could not be inferred`
);
}
let version = match[1];
if (!semver.valid(version)) {
logger.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
version = '0.0.0-' + version;
logger.debug(
`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`
);
version = `0.0.0-${version}`;
}
const s = semver.clean(version);
if (!s) {
throw new Error(`Malformed tools url ${url}. Version should be in SemVer format but have ${version} instead`);
throw new Error(
`Malformed tools url ${url}. Version should be in SemVer format but have ${version} instead`
);
}
return s;
@@ -287,13 +334,14 @@ export function getCodeQL(cmd: string): CodeQL {
function resolveFunction<T>(
partialCodeql: Partial<CodeQL>,
methodName: string,
defaultImplementation?: T): T {
if (typeof partialCodeql[methodName] !== 'function') {
defaultImplementation?: T
): T {
if (typeof partialCodeql[methodName] !== "function") {
if (defaultImplementation !== undefined) {
return defaultImplementation;
}
const dummyMethod = () => {
throw new Error('CodeQL ' + methodName + ' method not correctly defined');
throw new Error(`CodeQL ${methodName} method not correctly defined`);
};
return dummyMethod as any;
}
@@ -308,15 +356,18 @@ function resolveFunction<T>(
*/
export function setCodeQL(partialCodeql: Partial<CodeQL>): CodeQL {
cachedCodeQL = {
getPath: resolveFunction(partialCodeql, 'getPath', () => '/tmp/dummy-path'),
printVersion: resolveFunction(partialCodeql, 'printVersion'),
getTracerEnv: resolveFunction(partialCodeql, 'getTracerEnv'),
databaseInit: resolveFunction(partialCodeql, 'databaseInit'),
runAutobuild: resolveFunction(partialCodeql, 'runAutobuild'),
extractScannedLanguage: resolveFunction(partialCodeql, 'extractScannedLanguage'),
finalizeDatabase: resolveFunction(partialCodeql, 'finalizeDatabase'),
resolveQueries: resolveFunction(partialCodeql, 'resolveQueries'),
databaseAnalyze: resolveFunction(partialCodeql, 'databaseAnalyze')
getPath: resolveFunction(partialCodeql, "getPath", () => "/tmp/dummy-path"),
printVersion: resolveFunction(partialCodeql, "printVersion"),
getTracerEnv: resolveFunction(partialCodeql, "getTracerEnv"),
databaseInit: resolveFunction(partialCodeql, "databaseInit"),
runAutobuild: resolveFunction(partialCodeql, "runAutobuild"),
extractScannedLanguage: resolveFunction(
partialCodeql,
"extractScannedLanguage"
),
finalizeDatabase: resolveFunction(partialCodeql, "finalizeDatabase"),
resolveQueries: resolveFunction(partialCodeql, "resolveQueries"),
databaseAnalyze: resolveFunction(partialCodeql, "databaseAnalyze"),
};
return cachedCodeQL;
}
@@ -330,27 +381,33 @@ export function setCodeQL(partialCodeql: Partial<CodeQL>): CodeQL {
export function getCachedCodeQL(): CodeQL {
if (cachedCodeQL === undefined) {
// Should never happen as setCodeQL is called by testing-utils.setupTests
throw new Error('cachedCodeQL undefined');
throw new Error("cachedCodeQL undefined");
}
return cachedCodeQL;
}
function getCodeQLForCmd(cmd: string): CodeQL {
return {
getPath: function() {
getPath() {
return cmd;
},
printVersion: async function() {
async printVersion() {
await new toolrunnner.ToolRunner(cmd, [
'version',
'--format=json'
"version",
"--format=json",
]).exec();
},
getTracerEnv: async function(databasePath: string) {
async getTracerEnv(databasePath: string) {
// Write tracer-env.js to a temp location.
const tracerEnvJs = path.resolve(databasePath, 'working', 'tracer-env.js');
fs.mkdirSync(path.dirname(tracerEnvJs), {recursive: true});
fs.writeFileSync(tracerEnvJs, `
const tracerEnvJs = path.resolve(
databasePath,
"working",
"tracer-env.js"
);
fs.mkdirSync(path.dirname(tracerEnvJs), { recursive: true });
fs.writeFileSync(
tracerEnvJs,
`
const fs = require('fs');
const env = {};
for (let entry of Object.entries(process.env)) {
@@ -361,129 +418,156 @@ function getCodeQLForCmd(cmd: string): CodeQL {
}
}
process.stdout.write(process.argv[2]);
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');`);
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');`
);
const envFile = path.resolve(databasePath, 'working', 'env.tmp');
const envFile = path.resolve(databasePath, "working", "env.tmp");
await new toolrunnner.ToolRunner(cmd, [
'database',
'trace-command',
"database",
"trace-command",
databasePath,
...getExtraOptionsFromEnv(['database', 'trace-command']),
...getExtraOptionsFromEnv(["database", "trace-command"]),
process.execPath,
tracerEnvJs,
envFile
envFile,
]).exec();
return JSON.parse(fs.readFileSync(envFile, 'utf-8'));
return JSON.parse(fs.readFileSync(envFile, "utf-8"));
},
databaseInit: async function(databasePath: string, language: Language, sourceRoot: string) {
async databaseInit(
databasePath: string,
language: Language,
sourceRoot: string
) {
await new toolrunnner.ToolRunner(cmd, [
'database',
'init',
"database",
"init",
databasePath,
'--language=' + language,
'--source-root=' + sourceRoot,
...getExtraOptionsFromEnv(['database', 'init']),
`--language=${language}`,
`--source-root=${sourceRoot}`,
...getExtraOptionsFromEnv(["database", "init"]),
]).exec();
},
runAutobuild: async function(language: Language) {
const cmdName = process.platform === 'win32' ? 'autobuild.cmd' : 'autobuild.sh';
const autobuildCmd = path.join(path.dirname(cmd), language, 'tools', cmdName);
async runAutobuild(language: Language) {
const cmdName =
process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh";
const autobuildCmd = path.join(
path.dirname(cmd),
language,
"tools",
cmdName
);
// Update JAVA_TOOL_OPTIONS to contain '-Dhttp.keepAlive=false'
// This is because of an issue with Azure pipelines timing out connections after 4 minutes
// and Maven not properly handling closed connections
// Otherwise long build processes will timeout when pulling down Java packages
// https://developercommunity.visualstudio.com/content/problem/292284/maven-hosted-agent-connection-timeout.html
let javaToolOptions = process.env['JAVA_TOOL_OPTIONS'] || "";
process.env['JAVA_TOOL_OPTIONS'] = [...javaToolOptions.split(/\s+/), '-Dhttp.keepAlive=false', '-Dmaven.wagon.http.pool=false'].join(' ');
const javaToolOptions = process.env["JAVA_TOOL_OPTIONS"] || "";
process.env["JAVA_TOOL_OPTIONS"] = [
...javaToolOptions.split(/\s+/),
"-Dhttp.keepAlive=false",
"-Dmaven.wagon.http.pool=false",
].join(" ");
await new toolrunnner.ToolRunner(autobuildCmd).exec();
},
extractScannedLanguage: async function(databasePath: string, language: Language) {
async extractScannedLanguage(databasePath: string, language: Language) {
// Get extractor location
let extractorPath = '';
let extractorPath = "";
await new toolrunnner.ToolRunner(
cmd,
[
'resolve',
'extractor',
'--format=json',
'--language=' + language,
...getExtraOptionsFromEnv(['resolve', 'extractor']),
"resolve",
"extractor",
"--format=json",
`--language=${language}`,
...getExtraOptionsFromEnv(["resolve", "extractor"]),
],
{
silent: true,
listeners: {
stdout: (data) => { extractorPath += data.toString(); },
stderr: (data) => { process.stderr.write(data); }
}
}).exec();
stdout: (data) => {
extractorPath += data.toString();
},
stderr: (data) => {
process.stderr.write(data);
},
},
}
).exec();
// Set trace command
const ext = process.platform === 'win32' ? '.cmd' : '.sh';
const traceCommand = path.resolve(JSON.parse(extractorPath), 'tools', 'autobuild' + ext);
const ext = process.platform === "win32" ? ".cmd" : ".sh";
const traceCommand = path.resolve(
JSON.parse(extractorPath),
"tools",
`autobuild${ext}`
);
// Run trace command
await new toolrunnner.ToolRunner(cmd, [
'database',
'trace-command',
...getExtraOptionsFromEnv(['database', 'trace-command']),
"database",
"trace-command",
...getExtraOptionsFromEnv(["database", "trace-command"]),
databasePath,
'--',
traceCommand
"--",
traceCommand,
]).exec();
},
finalizeDatabase: async function(databasePath: string) {
async finalizeDatabase(databasePath: string) {
await new toolrunnner.ToolRunner(cmd, [
'database',
'finalize',
...getExtraOptionsFromEnv(['database', 'finalize']),
databasePath
"database",
"finalize",
...getExtraOptionsFromEnv(["database", "finalize"]),
databasePath,
]).exec();
},
resolveQueries: async function(queries: string[], extraSearchPath: string | undefined) {
async resolveQueries(
queries: string[],
extraSearchPath: string | undefined
) {
const codeqlArgs = [
'resolve',
'queries',
"resolve",
"queries",
...queries,
'--format=bylanguage',
...getExtraOptionsFromEnv(['resolve', 'queries'])
"--format=bylanguage",
...getExtraOptionsFromEnv(["resolve", "queries"]),
];
if (extraSearchPath !== undefined) {
codeqlArgs.push('--search-path', extraSearchPath);
codeqlArgs.push("--search-path", extraSearchPath);
}
let output = '';
let output = "";
await new toolrunnner.ToolRunner(cmd, codeqlArgs, {
listeners: {
stdout: (data: Buffer) => {
output += data.toString();
}
}
},
},
}).exec();
return JSON.parse(output);
},
databaseAnalyze: async function(
async databaseAnalyze(
databasePath: string,
sarifFile: string,
querySuite: string,
memoryFlag: string,
addSnippetsFlag: string,
threadsFlag: string) {
threadsFlag: string
) {
await new toolrunnner.ToolRunner(cmd, [
'database',
'analyze',
"database",
"analyze",
memoryFlag,
threadsFlag,
databasePath,
'--format=sarif-latest',
'--output=' + sarifFile,
"--format=sarif-latest",
`--output=${sarifFile}`,
addSnippetsFlag,
...getExtraOptionsFromEnv(['database', 'analyze']),
querySuite
...getExtraOptionsFromEnv(["database", "analyze"]),
querySuite,
]).exec();
}
},
};
}
@@ -491,7 +575,7 @@ function getCodeQLForCmd(cmd: string): CodeQL {
* Gets the options for `path` of `options` as an array of extra option strings.
*/
function getExtraOptionsFromEnv(path: string[]) {
let options: ExtraOptions = util.getExtraOptionsEnvParam();
const options: ExtraOptions = util.getExtraOptionsEnvParam();
return getExtraOptions(options, path, []);
}
@@ -504,7 +588,8 @@ function getExtraOptionsFromEnv(path: string[]) {
export /* exported for testing */ function getExtraOptions(
options: any,
path: string[],
pathInfo: string[]): string[] {
pathInfo: string[]
): string[] {
/**
* Gets `options` as an array of extra option strings.
*
@@ -515,23 +600,30 @@ export /* exported for testing */ function getExtraOptions(
return [];
}
if (!Array.isArray(options)) {
const msg =
`The extra options for '${pathInfo.join('.')}' ('${JSON.stringify(options)}') are not in an array.`;
const msg = `The extra options for '${pathInfo.join(
"."
)}' ('${JSON.stringify(options)}') are not in an array.`;
throw new Error(msg);
}
return options.map(o => {
return options.map((o) => {
const t = typeof o;
if (t !== 'string' && t !== 'number' && t !== 'boolean') {
const msg =
`The extra option for '${pathInfo.join('.')}' ('${JSON.stringify(o)}') is not a primitive value.`;
if (t !== "string" && t !== "number" && t !== "boolean") {
const msg = `The extra option for '${pathInfo.join(
"."
)}' ('${JSON.stringify(o)}') is not a primitive value.`;
throw new Error(msg);
}
return o + '';
return `${o}`;
});
}
let all = asExtraOptions(options?.['*'], pathInfo.concat('*'));
let specific = path.length === 0 ?
asExtraOptions(options, pathInfo) :
getExtraOptions(options?.[path[0]], path?.slice(1), pathInfo.concat(path[0]));
const all = asExtraOptions(options?.["*"], pathInfo.concat("*"));
const specific =
path.length === 0
? asExtraOptions(options, pathInfo)
: getExtraOptions(
options?.[path[0]],
path?.slice(1),
pathInfo.concat(path[0])
);
return all.concat(specific);
}
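Pulling the pieces above together: getExtraOptionsFromEnv reads an ExtraOptions-shaped value from an environment parameter (via util.getExtraOptionsEnvParam), and getExtraOptions merges the "*" wildcard entries with the path-specific ones. A hedged sketch of such a value and what it would contribute to `codeql database analyze`; the option strings themselves are invented:
// Invented options: anything under "*" applies to every command,
// anything under database.analyze only to `codeql database analyze`.
const extraOptions: ExtraOptions = {
  "*": ["--verbosity=warnings"],
  database: {
    analyze: ["--max-paths=5"],
  },
};
// getExtraOptions(extraOptions, ["database", "analyze"], [])
//   -> ["--verbosity=warnings", "--max-paths=5"]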

File diff suppressed because it is too large.


@@ -1,33 +1,33 @@
import * as fs from 'fs';
import * as yaml from 'js-yaml';
import * as path from 'path';
import * as fs from "fs";
import * as yaml from "js-yaml";
import * as path from "path";
import * as api from './api-client';
import { CodeQL, ResolveQueriesOutput } from './codeql';
import * as api from "./api-client";
import { CodeQL, ResolveQueriesOutput } from "./codeql";
import * as externalQueries from "./external-queries";
import { Language, parseLanguage } from "./languages";
import { Logger } from './logging';
import { RepositoryNwo } from './repository';
import { Logger } from "./logging";
import { RepositoryNwo } from "./repository";
// Property names from the user-supplied config file.
const NAME_PROPERTY = 'name';
const DISABLE_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
const QUERIES_PROPERTY = 'queries';
const QUERIES_USES_PROPERTY = 'uses';
const PATHS_IGNORE_PROPERTY = 'paths-ignore';
const PATHS_PROPERTY = 'paths';
const NAME_PROPERTY = "name";
const DISABLE_DEFAULT_QUERIES_PROPERTY = "disable-default-queries";
const QUERIES_PROPERTY = "queries";
const QUERIES_USES_PROPERTY = "uses";
const PATHS_IGNORE_PROPERTY = "paths-ignore";
const PATHS_PROPERTY = "paths";
/**
* Format of the config file supplied by the user.
*/
export interface UserConfig {
name?: string;
'disable-default-queries'?: boolean;
queries?: {
"disable-default-queries"?: boolean;
queries?: Array<{
name?: string;
uses: string;
}[];
'paths-ignore'?: string[];
}>;
"paths-ignore"?: string[];
paths?: string[];
}
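A hedged example of a value matching the UserConfig interface above, written as a TypeScript literal rather than the YAML users actually supply to the config file (names and paths are invented):
const exampleUserConfig: UserConfig = {
  name: "My CodeQL config",                         // invented
  "disable-default-queries": false,
  queries: [
    { uses: "security-extended" },                  // a built-in suite
    { name: "Local queries", uses: "./ql/custom" }, // invented local path
  ],
  "paths-ignore": ["node_modules", "**/*.test.ts"],
  paths: ["src"],
};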
@@ -86,16 +86,17 @@ export interface Config {
*
* Format is a map from language to an array of path suffixes of .ql files.
*/
const DISABLED_BUILTIN_QUERIES: {[language: string]: string[]} = {
'csharp': [
'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
]
const DISABLED_BUILTIN_QUERIES: { [language: string]: string[] } = {
csharp: [
"ql/src/Security Features/CWE-937/VulnerablePackage.ql",
"ql/src/Security Features/CWE-451/MissingXFrameOptions.ql",
],
};
function queryIsDisabled(language, query): boolean {
return (DISABLED_BUILTIN_QUERIES[language] || [])
.some(disabledQuery => query.endsWith(disabledQuery));
return (DISABLED_BUILTIN_QUERIES[language] || []).some((disabledQuery) =>
query.endsWith(disabledQuery)
);
}
/**
@@ -106,17 +107,25 @@ function validateQueries(resolvedQueries: ResolveQueriesOutput) {
const noDeclaredLanguage = resolvedQueries.noDeclaredLanguage;
const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
if (noDeclaredLanguageQueries.length !== 0) {
throw new Error('The following queries do not declare a language. ' +
'Their qlpack.yml files are either missing or is invalid.\n' +
noDeclaredLanguageQueries.join('\n'));
throw new Error(
`${
"The following queries do not declare a language. " +
"Their qlpack.yml files are either missing or is invalid.\n"
}${noDeclaredLanguageQueries.join("\n")}`
);
}
const multipleDeclaredLanguages = resolvedQueries.multipleDeclaredLanguages;
const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
const multipleDeclaredLanguagesQueries = Object.keys(
multipleDeclaredLanguages
);
if (multipleDeclaredLanguagesQueries.length !== 0) {
throw new Error('The following queries declare multiple languages. ' +
'Their qlpack.yml files are either missing or is invalid.\n' +
multipleDeclaredLanguagesQueries.join('\n'));
throw new Error(
`${
"The following queries declare multiple languages. " +
"Their qlpack.yml files are either missing or is invalid.\n"
}${multipleDeclaredLanguagesQueries.join("\n")}`
);
}
}
@@ -128,15 +137,22 @@ async function runResolveQueries(
resultMap: { [language: string]: string[] },
toResolve: string[],
extraSearchPath: string | undefined,
errorOnInvalidQueries: boolean) {
errorOnInvalidQueries: boolean
) {
const resolvedQueries = await codeQL.resolveQueries(
toResolve,
extraSearchPath
);
const resolvedQueries = await codeQL.resolveQueries(toResolve, extraSearchPath);
for (const [language, queries] of Object.entries(resolvedQueries.byLanguage)) {
for (const [language, queries] of Object.entries(
resolvedQueries.byLanguage
)) {
if (resultMap[language] === undefined) {
resultMap[language] = [];
}
resultMap[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
resultMap[language].push(
...Object.keys(queries).filter((q) => !queryIsDisabled(language, q))
);
}
if (errorOnInvalidQueries) {
@@ -147,13 +163,17 @@ async function runResolveQueries(
/**
* Get the set of queries included by default.
*/
async function addDefaultQueries(codeQL: CodeQL, languages: string[], resultMap: { [language: string]: string[] }) {
const suites = languages.map(l => l + '-code-scanning.qls');
async function addDefaultQueries(
codeQL: CodeQL,
languages: string[],
resultMap: { [language: string]: string[] }
) {
const suites = languages.map((l) => `${l}-code-scanning.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined, false);
}
// The set of acceptable values for built-in suites from the codeql bundle
const builtinSuites = ['security-extended', 'security-and-quality'] as const;
const builtinSuites = ["security-extended", "security-and-quality"] as const;
/**
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
@@ -164,14 +184,14 @@ async function addBuiltinSuiteQueries(
codeQL: CodeQL,
resultMap: { [language: string]: string[] },
suiteName: string,
configFile?: string) {
configFile?: string
) {
const suite = builtinSuites.find((suite) => suite === suiteName);
if (!suite) {
throw new Error(getQueryUsesInvalid(configFile, suiteName));
}
const suites = languages.map(l => l + '-' + suiteName + '.qls');
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined, false);
}
@@ -183,8 +203,8 @@ async function addLocalQueries(
resultMap: { [language: string]: string[] },
localQueryPath: string,
checkoutPath: string,
configFile?: string) {
configFile?: string
) {
// Resolve the local path against the workspace so that when this is
// passed to codeql it resolves to exactly the path we expect it to resolve to.
let absoluteQueryPath = path.join(checkoutPath, localQueryPath);
@@ -198,11 +218,23 @@ async function addLocalQueries(
absoluteQueryPath = fs.realpathSync(absoluteQueryPath);
// Check the local path doesn't jump outside the repo using '..' or symlinks
if (!(absoluteQueryPath + path.sep).startsWith(fs.realpathSync(checkoutPath) + path.sep)) {
throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
if (
!(absoluteQueryPath + path.sep).startsWith(
fs.realpathSync(checkoutPath) + path.sep
)
) {
throw new Error(
getLocalPathOutsideOfRepository(configFile, localQueryPath)
);
}
await runResolveQueries(codeQL, resultMap, [absoluteQueryPath], checkoutPath, true);
await runResolveQueries(
codeQL,
resultMap,
[absoluteQueryPath],
checkoutPath,
true
);
}
/**
@@ -215,16 +247,16 @@ async function addRemoteQueries(
tempDir: string,
githubUrl: string,
logger: Logger,
configFile?: string) {
let tok = queryUses.split('@');
configFile?: string
) {
let tok = queryUses.split("@");
if (tok.length !== 2) {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
const ref = tok[1];
tok = tok[0].split('/');
tok = tok[0].split("/");
// The first token is the owner
// The second token is the repo
// The rest is a path, if there is more than one token combine them to form the full path
@@ -232,10 +264,10 @@ async function addRemoteQueries(
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
// Check none of the parts of the repository name are empty
if (tok[0].trim() === '' || tok[1].trim() === '') {
if (tok[0].trim() === "" || tok[1].trim() === "") {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
const nwo = tok[0] + '/' + tok[1];
const nwo = `${tok[0]}/${tok[1]}`;
// Checkout the external repository
const checkoutPath = await externalQueries.checkoutExternalRepository(
@@ -243,11 +275,13 @@ async function addRemoteQueries(
ref,
githubUrl,
tempDir,
logger);
logger
);
const queryPath = tok.length > 2
? path.join(checkoutPath, tok.slice(2).join('/'))
: checkoutPath;
const queryPath =
tok.length > 2
? path.join(checkoutPath, tok.slice(2).join("/"))
: checkoutPath;
await runResolveQueries(codeQL, resultMap, [queryPath], checkoutPath, true);
}
@@ -269,8 +303,8 @@ async function parseQueryUses(
checkoutPath: string,
githubUrl: string,
logger: Logger,
configFile?: string) {
configFile?: string
) {
queryUses = queryUses.trim();
if (queryUses === "") {
throw new Error(getQueryUsesInvalid(configFile));
@@ -278,18 +312,38 @@ async function parseQueryUses(
// Check for the local path case before we start trying to parse the repository name
if (queryUses.startsWith("./")) {
await addLocalQueries(codeQL, resultMap, queryUses.slice(2), checkoutPath, configFile);
await addLocalQueries(
codeQL,
resultMap,
queryUses.slice(2),
checkoutPath,
configFile
);
return;
}
// Check for one of the builtin suites
if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
await addBuiltinSuiteQueries(languages, codeQL, resultMap, queryUses, configFile);
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
await addBuiltinSuiteQueries(
languages,
codeQL,
resultMap,
queryUses,
configFile
);
return;
}
// Otherwise, must be a reference to another repo
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, logger, configFile);
await addRemoteQueries(
codeQL,
resultMap,
queryUses,
tempDir,
githubUrl,
logger,
configFile
);
}
// Regex validating stars in paths or paths-ignore entries.
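Before the hunks move on to path validation, a hedged recap of the three shapes parseQueryUses accepts for a `uses` entry, with invented values:
// 1. A local path, relative to the checkout, signalled by the "./" prefix.
const localUses = "./ql/custom-queries";
// 2. A bare built-in suite name (no "/" and no "@").
const suiteUses = "security-and-quality";
// 3. Anything else must look like "owner/repo[/path]@ref" and is fetched from another repository.
const remoteUses = "octo-org/codeql-queries/cpp@main"; // invented owner/repo/path/ref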
@@ -307,58 +361,70 @@ export function validateAndSanitisePath(
originalPath: string,
propertyName: string,
configFile: string,
logger: Logger): string {
logger: Logger
): string {
// Take a copy so we don't modify the original path, so we can still construct error messages
let path = originalPath;
// All paths are relative to the src root, so strip off leading slashes.
while (path.charAt(0) === '/') {
while (path.charAt(0) === "/") {
path = path.substring(1);
}
// Trailing ** are redundant, so strip them off
if (path.endsWith('/**')) {
if (path.endsWith("/**")) {
path = path.substring(0, path.length - 2);
}
// An empty path is not allowed as it's meaningless
if (path === '') {
throw new Error(getConfigFilePropertyError(
configFile,
propertyName,
'"' + originalPath + '" is not an invalid path. ' +
'It is not necessary to include it, and it is not allowed to exclude it.'));
if (path === "") {
throw new Error(
getConfigFilePropertyError(
configFile,
propertyName,
`"${originalPath}" is not an invalid path. ` +
`It is not necessary to include it, and it is not allowed to exclude it.`
)
);
}
// Check for illegal uses of **
if (path.match(pathStarsRegex)) {
throw new Error(getConfigFilePropertyError(
configFile,
propertyName,
'"' + originalPath + '" contains an invalid "**" wildcard. ' +
'They must be immediately preceeded and followed by a slash as in "/**/", or come at the start or end.'));
throw new Error(
getConfigFilePropertyError(
configFile,
propertyName,
`"${originalPath}" contains an invalid "**" wildcard. ` +
`They must be immediately preceeded and followed by a slash as in "/**/", or come at the start or end.`
)
);
}
// Check for other regex characters that we don't support.
// Output a warning so the user knows, but otherwise continue normally.
if (path.match(filterPatternCharactersRegex)) {
logger.warning(getConfigFilePropertyError(
configFile,
propertyName,
'"' + originalPath + '" contains an unsupported character. ' +
'The filter pattern characters ?, +, [, ], ! are not supported and will be matched literally.'));
logger.warning(
getConfigFilePropertyError(
configFile,
propertyName,
`"${originalPath}" contains an unsupported character. ` +
`The filter pattern characters ?, +, [, ], ! are not supported and will be matched literally.`
)
);
}
// Ban any uses of backslash for now.
// This may not play nicely with project layouts.
// This restriction can be lifted later if we determine they are ok.
if (path.indexOf('\\') !== -1) {
throw new Error(getConfigFilePropertyError(
configFile,
propertyName,
'"' + originalPath + '" contains an "\\" character. These are not allowed in filters. ' +
'If running on windows we recommend using "/" instead for path filters.'));
if (path.indexOf("\\") !== -1) {
throw new Error(
getConfigFilePropertyError(
configFile,
propertyName,
`"${originalPath}" contains an "\\" character. These are not allowed in filters. ` +
`If running on windows we recommend using "/" instead for path filters.`
)
);
}
return path;
@@ -368,86 +434,132 @@ export function validateAndSanitisePath(
// the property was in a workflow file, not a config file
export function getNameInvalid(configFile: string): string {
return getConfigFilePropertyError(configFile, NAME_PROPERTY, 'must be a non-empty string');
return getConfigFilePropertyError(
configFile,
NAME_PROPERTY,
"must be a non-empty string"
);
}
export function getDisableDefaultQueriesInvalid(configFile: string): string {
return getConfigFilePropertyError(configFile, DISABLE_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
return getConfigFilePropertyError(
configFile,
DISABLE_DEFAULT_QUERIES_PROPERTY,
"must be a boolean"
);
}
export function getQueriesInvalid(configFile: string): string {
return getConfigFilePropertyError(configFile, QUERIES_PROPERTY, 'must be an array');
}
export function getQueryUsesInvalid(configFile: string | undefined, queryUses?: string): string {
return getConfigFilePropertyError(
configFile,
QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY,
'must be a built-in suite (' + builtinSuites.join(' or ') +
'), a relative path, or be of the form "owner/repo[/path]@ref"' +
(queryUses !== undefined ? '\n Found: ' + queryUses : ''));
QUERIES_PROPERTY,
"must be an array"
);
}
export function getQueryUsesInvalid(
configFile: string | undefined,
queryUses?: string
): string {
return getConfigFilePropertyError(
configFile,
`${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`,
`must be a built-in suite (${builtinSuites.join(
" or "
)}), a relative path, or be of the form "owner/repo[/path]@ref"${
queryUses !== undefined ? `\n Found: ${queryUses}` : ""
}`
);
}
export function getPathsIgnoreInvalid(configFile: string): string {
return getConfigFilePropertyError(configFile, PATHS_IGNORE_PROPERTY, 'must be an array of non-empty strings');
return getConfigFilePropertyError(
configFile,
PATHS_IGNORE_PROPERTY,
"must be an array of non-empty strings"
);
}
export function getPathsInvalid(configFile: string): string {
return getConfigFilePropertyError(configFile, PATHS_PROPERTY, 'must be an array of non-empty strings');
}
export function getLocalPathOutsideOfRepository(configFile: string | undefined, localPath: string): string {
return getConfigFilePropertyError(
configFile,
QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY,
'is invalid as the local path "' + localPath + '" is outside of the repository');
PATHS_PROPERTY,
"must be an array of non-empty strings"
);
}
export function getLocalPathDoesNotExist(configFile: string | undefined, localPath: string): string {
export function getLocalPathOutsideOfRepository(
configFile: string | undefined,
localPath: string
): string {
return getConfigFilePropertyError(
configFile,
QUERIES_PROPERTY + '.' + QUERIES_USES_PROPERTY,
'is invalid as the local path "' + localPath + '" does not exist in the repository');
`${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`,
`is invalid as the local path "${localPath}" is outside of the repository`
);
}
export function getConfigFileOutsideWorkspaceErrorMessage(configFile: string): string {
return 'The configuration file "' + configFile + '" is outside of the workspace';
export function getLocalPathDoesNotExist(
configFile: string | undefined,
localPath: string
): string {
return getConfigFilePropertyError(
configFile,
`${QUERIES_PROPERTY}.${QUERIES_USES_PROPERTY}`,
`is invalid as the local path "${localPath}" does not exist in the repository`
);
}
export function getConfigFileDoesNotExistErrorMessage(configFile: string): string {
return 'The configuration file "' + configFile + '" does not exist';
export function getConfigFileOutsideWorkspaceErrorMessage(
configFile: string
): string {
return `The configuration file "${configFile}" is outside of the workspace`;
}
export function getConfigFileRepoFormatInvalidMessage(configFile: string): string {
let error = 'The configuration file "' + configFile + '" is not a supported remote file reference.';
error += ' Expected format <owner>/<repository>/<file-path>@<ref>';
export function getConfigFileDoesNotExistErrorMessage(
configFile: string
): string {
return `The configuration file "${configFile}" does not exist`;
}
export function getConfigFileRepoFormatInvalidMessage(
configFile: string
): string {
let error = `The configuration file "${configFile}" is not a supported remote file reference.`;
error += " Expected format <owner>/<repository>/<file-path>@<ref>";
return error;
}
export function getConfigFileFormatInvalidMessage(configFile: string): string {
return 'The configuration file "' + configFile + '" could not be read';
return `The configuration file "${configFile}" could not be read`;
}
export function getConfigFileDirectoryGivenMessage(configFile: string): string {
return 'The configuration file "' + configFile + '" looks like a directory, not a file';
return `The configuration file "${configFile}" looks like a directory, not a file`;
}
function getConfigFilePropertyError(configFile: string | undefined, property: string, error: string): string {
function getConfigFilePropertyError(
configFile: string | undefined,
property: string,
error: string
): string {
if (configFile === undefined) {
return 'The workflow property "' + property + '" is invalid: ' + error;
return `The workflow property "${property}" is invalid: ${error}`;
} else {
return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
return `The configuration file "${configFile}" is invalid: property "${property}" ${error}`;
}
}
export function getNoLanguagesError(): string {
return "Did not detect any languages to analyze. " +
"Please update input in workflow or check that GitHub detects the correct languages in your repository.";
return (
"Did not detect any languages to analyze. " +
"Please update input in workflow or check that GitHub detects the correct languages in your repository."
);
}
export function getUnknownLanguagesError(languages: string[]): string {
return "Did not recognise the following languages: " + languages.join(', ');
return `Did not recognise the following languages: ${languages.join(", ")}`;
}
/**
@@ -457,23 +569,25 @@ async function getLanguagesInRepo(
repository: RepositoryNwo,
githubAuth: string,
githubUrl: string,
logger: Logger): Promise<Language[]> {
logger: Logger
): Promise<Language[]> {
logger.debug(`GitHub repo ${repository.owner} ${repository.repo}`);
const response = await api.getApiClient(githubAuth, githubUrl, true).repos.listLanguages({
owner: repository.owner,
repo: repository.repo
});
const response = await api
.getApiClient(githubAuth, githubUrl, true)
.repos.listLanguages({
owner: repository.owner,
repo: repository.repo,
});
logger.debug("Languages API response: " + JSON.stringify(response));
logger.debug(`Languages API response: ${JSON.stringify(response)}`);
// The GitHub API is going to return languages in order of popularity.
// When we pick a language to autobuild we want to pick the most popular traced language.
// Since sets in javascript maintain insertion order, using a set here and then splatting it
// into an array gives us an array of languages ordered by popularity
let languages: Set<Language> = new Set();
for (let lang of Object.keys(response.data)) {
let parsedLang = parseLanguage(lang);
const languages: Set<Language> = new Set();
for (const lang of Object.keys(response.data)) {
const parsedLang = parseLanguage(lang);
if (parsedLang !== undefined) {
languages.add(parsedLang);
}
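// Illustrative sketch only, not part of this lint-fix change: because JavaScript Sets
// preserve insertion order, spreading the set built above keeps the popularity order
// that the Languages API returned. The values below are hypothetical.
//   const byPopularity = new Set<string>();
//   byPopularity.add("javascript"); // most popular, listed first by the API
//   byPopularity.add("python");
//   [...byPopularity];              // ["javascript", "python"]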
@@ -496,14 +610,14 @@ async function getLanguages(
repository: RepositoryNwo,
githubAuth: string,
githubUrl: string,
logger: Logger): Promise<Language[]> {
logger: Logger
): Promise<Language[]> {
// Obtain from action input 'languages' if set
let languages = (languagesInput || "")
.split(',')
.map(x => x.trim())
.filter(x => x.length > 0);
logger.info("Languages from configuration: " + JSON.stringify(languages));
.split(",")
.map((x) => x.trim())
.filter((x) => x.length > 0);
logger.info(`Languages from configuration: ${JSON.stringify(languages)}`);
if (languages.length === 0) {
// Obtain languages as all languages in the repo that can be analysed
@@ -511,8 +625,11 @@ async function getLanguages(
repository,
githubAuth,
githubUrl,
logger);
logger.info("Automatically detected languages: " + JSON.stringify(languages));
logger
);
logger.info(
`Automatically detected languages: ${JSON.stringify(languages)}`
);
}
// If the languages parameter was not given and no languages were
@@ -524,7 +641,7 @@ async function getLanguages(
// Make sure they are supported
const parsedLanguages: Language[] = [];
const unknownLanguages: string[] = [];
for (let language of languages) {
for (const language of languages) {
const parsedLanguage = parseLanguage(language);
if (parsedLanguage === undefined) {
unknownLanguages.push(language);
@@ -547,13 +664,13 @@ async function addQueriesFromWorkflow(
tempDir: string,
checkoutPath: string,
githubUrl: string,
logger: Logger) {
logger: Logger
) {
queriesInput = queriesInput.trim();
// "+" means "don't override config file" - see shouldAddConfigFileQueries
queriesInput = queriesInput.replace(/^\+/, '');
queriesInput = queriesInput.replace(/^\+/, "");
for (const query of queriesInput.split(',')) {
for (const query of queriesInput.split(",")) {
await parseQueryUses(
languages,
codeQL,
@@ -562,7 +679,8 @@ async function addQueriesFromWorkflow(
tempDir,
checkoutPath,
githubUrl,
logger);
logger
);
}
}
@@ -572,7 +690,7 @@ async function addQueriesFromWorkflow(
// should instead be added in addition
function shouldAddConfigFileQueries(queriesInput: string | undefined): boolean {
if (queriesInput) {
return queriesInput.trimStart().substr(0, 1) === '+';
return queriesInput.trimStart().substr(0, 1) === "+";
}
return true;
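// Illustrative sketch only, not part of this lint-fix change: a leading "+" on the
// queries input means "run these in addition to the config file's queries" rather than
// overriding them. The query suite name below is hypothetical.
//   shouldAddConfigFileQueries("+security-extended")  // true  - keep config file queries
//   shouldAddConfigFileQueries("security-extended")   // false - override config file queries
//   shouldAddConfigFileQueries(undefined)             // true  - nothing to override with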
@@ -591,14 +709,15 @@ export async function getDefaultConfig(
checkoutPath: string,
githubAuth: string,
githubUrl: string,
logger: Logger): Promise<Config> {
logger: Logger
): Promise<Config> {
const languages = await getLanguages(
languagesInput,
repository,
githubAuth,
githubUrl,
logger);
logger
);
const queries = {};
await addDefaultQueries(codeQL, languages, queries);
if (queriesInput) {
@@ -610,12 +729,13 @@ export async function getDefaultConfig(
tempDir,
checkoutPath,
githubUrl,
logger);
logger
);
}
return {
languages: languages,
queries: queries,
languages,
queries,
pathsIgnore: [],
paths: [],
originalUserInput: {},
@@ -639,8 +759,8 @@ async function loadConfig(
checkoutPath: string,
githubAuth: string,
githubUrl: string,
logger: Logger): Promise<Config> {
logger: Logger
): Promise<Config> {
let parsedYAML: UserConfig;
if (isLocal(configFile)) {
@@ -648,10 +768,7 @@ async function loadConfig(
configFile = path.resolve(checkoutPath, configFile);
parsedYAML = getLocalConfig(configFile, checkoutPath);
} else {
parsedYAML = await getRemoteConfig(
configFile,
githubAuth,
githubUrl);
parsedYAML = await getRemoteConfig(configFile, githubAuth, githubUrl);
}
// Validate that the 'name' property is syntactically correct,
@@ -670,7 +787,8 @@ async function loadConfig(
repository,
githubAuth,
githubUrl,
logger);
logger
);
const queries = {};
const pathsIgnore: string[] = [];
@@ -700,14 +818,21 @@ async function loadConfig(
tempDir,
checkoutPath,
githubUrl,
logger);
logger
);
}
if (shouldAddConfigFileQueries(queriesInput) && QUERIES_PROPERTY in parsedYAML) {
if (
shouldAddConfigFileQueries(queriesInput) &&
QUERIES_PROPERTY in parsedYAML
) {
if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
throw new Error(getQueriesInvalid(configFile));
}
for (const query of parsedYAML[QUERIES_PROPERTY]!) {
if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
if (
!(QUERIES_USES_PROPERTY in query) ||
typeof query[QUERIES_USES_PROPERTY] !== "string"
) {
throw new Error(getQueryUsesInvalid(configFile));
}
await parseQueryUses(
@@ -719,7 +844,8 @@ async function loadConfig(
checkoutPath,
githubUrl,
logger,
configFile);
configFile
);
}
}
@@ -727,11 +853,13 @@ async function loadConfig(
if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
throw new Error(getPathsIgnoreInvalid(configFile));
}
parsedYAML[PATHS_IGNORE_PROPERTY]!.forEach(path => {
if (typeof path !== "string" || path === '') {
parsedYAML[PATHS_IGNORE_PROPERTY]!.forEach((path) => {
if (typeof path !== "string" || path === "") {
throw new Error(getPathsIgnoreInvalid(configFile));
}
pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile, logger));
pathsIgnore.push(
validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile, logger)
);
});
}
@@ -739,11 +867,13 @@ async function loadConfig(
if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
throw new Error(getPathsInvalid(configFile));
}
parsedYAML[PATHS_PROPERTY]!.forEach(path => {
if (typeof path !== "string" || path === '') {
parsedYAML[PATHS_PROPERTY]!.forEach((path) => {
if (typeof path !== "string" || path === "") {
throw new Error(getPathsInvalid(configFile));
}
paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile, logger));
paths.push(
validateAndSanitisePath(path, PATHS_PROPERTY, configFile, logger)
);
});
}
@@ -751,8 +881,10 @@ async function loadConfig(
// it is a user configuration error.
for (const language of languages) {
if (queries[language] === undefined || queries[language].length === 0) {
throw new Error(`Did not detect any queries to run for ${language}. ` +
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
throw new Error(
`Did not detect any queries to run for ${language}. ` +
"Please make sure that the default queries are enabled, or you are specifying queries to run."
);
}
}
@@ -785,13 +917,13 @@ export async function initConfig(
checkoutPath: string,
githubAuth: string,
githubUrl: string,
logger: Logger): Promise<Config> {
logger: Logger
): Promise<Config> {
let config: Config;
// If no config file was provided create an empty one
if (!configFile) {
logger.debug('No configuration file was provided');
logger.debug("No configuration file was provided");
config = await getDefaultConfig(
languagesInput,
queriesInput,
@@ -802,7 +934,8 @@ export async function initConfig(
checkoutPath,
githubAuth,
githubUrl,
logger);
logger
);
} else {
config = await loadConfig(
languagesInput,
@@ -815,7 +948,8 @@ export async function initConfig(
checkoutPath,
githubAuth,
githubUrl,
logger);
logger
);
}
// Save the config so we can easily access it again in the future
@@ -829,7 +963,7 @@ function isLocal(configPath: string): boolean {
return true;
}
return (configPath.indexOf("@") === -1);
return configPath.indexOf("@") === -1;
}
function getLocalConfig(configFile: string, checkoutPath: string): UserConfig {
@@ -843,28 +977,32 @@ function getLocalConfig(configFile: string, checkoutPath: string): UserConfig {
throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
}
return yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
return yaml.safeLoad(fs.readFileSync(configFile, "utf8"));
}
async function getRemoteConfig(
configFile: string,
githubAuth: string,
githubUrl: string): Promise<UserConfig> {
githubUrl: string
): Promise<UserConfig> {
// retrieve the various parts of the config location, and ensure they're present
const format = new RegExp('(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)');
const format = new RegExp(
"(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)"
);
const pieces = format.exec(configFile);
// 5 = 4 groups + the whole expression
if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
}
const response = await api.getApiClient(githubAuth, githubUrl, true).repos.getContents({
owner: pieces.groups.owner,
repo: pieces.groups.repo,
path: pieces.groups.path,
ref: pieces.groups.ref,
});
const response = await api
.getApiClient(githubAuth, githubUrl, true)
.repos.getContents({
owner: pieces.groups.owner,
repo: pieces.groups.repo,
path: pieces.groups.path,
ref: pieces.groups.ref,
});
let fileContents: string;
if ("content" in response.data && response.data.content !== undefined) {
@@ -875,14 +1013,14 @@ async function getRemoteConfig(
throw new Error(getConfigFileFormatInvalidMessage(configFile));
}
return yaml.safeLoad(Buffer.from(fileContents, 'base64').toString('binary'));
return yaml.safeLoad(Buffer.from(fileContents, "base64").toString("binary"));
}
/**
* Get the file path where the parsed config will be stored.
*/
export function getPathToParsedConfigFile(tempDir: string): string {
return path.join(tempDir, 'config');
return path.join(tempDir, "config");
}
/**
@@ -892,8 +1030,8 @@ async function saveConfig(config: Config, logger: Logger) {
const configString = JSON.stringify(config);
const configFile = getPathToParsedConfigFile(config.tempDir);
fs.mkdirSync(path.dirname(configFile), { recursive: true });
fs.writeFileSync(configFile, configString, 'utf8');
logger.debug('Saved config:');
fs.writeFileSync(configFile, configString, "utf8");
logger.debug("Saved config:");
logger.debug(configString);
}
@@ -901,13 +1039,16 @@ async function saveConfig(config: Config, logger: Logger) {
* Get the config that has been saved to the given temp dir.
* If the config could not be found then returns undefined.
*/
export async function getConfig(tempDir: string, logger: Logger): Promise<Config | undefined> {
export async function getConfig(
tempDir: string,
logger: Logger
): Promise<Config | undefined> {
const configFile = getPathToParsedConfigFile(tempDir);
if (!fs.existsSync(configFile)) {
return undefined;
}
const configString = fs.readFileSync(configFile, 'utf8');
logger.debug('Loaded config:');
const configString = fs.readFileSync(configFile, "utf8");
logger.debug("Loaded config:");
logger.debug(configString);
return JSON.parse(configString);
}


@@ -1,45 +1,53 @@
import * as toolrunnner from '@actions/exec/lib/toolrunner';
import test from 'ava';
import * as toolrunnner from "@actions/exec/lib/toolrunner";
import test from "ava";
import * as fs from "fs";
import * as path from "path";
import * as externalQueries from "./external-queries";
import { getRunnerLogger } from './logging';
import {setupTests} from './testing-utils';
import { getRunnerLogger } from "./logging";
import { setupTests } from "./testing-utils";
import * as util from "./util";
setupTests(test);
test("checkoutExternalQueries", async t => {
await util.withTmpDir(async tmpDir => {
test("checkoutExternalQueries", async (t) => {
await util.withTmpDir(async (tmpDir) => {
// Create a test repo in a subdir of the temp dir.
// It should have a default branch with two commits after the initial commit, where
// - the first commit contains files 'a' and 'b'
// - the second commit contains only 'a'
// Place the repo in a subdir because we're going to checkout a copy in tmpDir
const testRepoBaseDir = path.join(tmpDir, 'test-repo-dir');
const repoName = 'some/repo';
const testRepoBaseDir = path.join(tmpDir, "test-repo-dir");
const repoName = "some/repo";
const repoPath = path.join(testRepoBaseDir, repoName);
const repoGitDir = path.join(repoPath, '.git');
const repoGitDir = path.join(repoPath, ".git");
// Run the given git command, and return the output.
// Passes --git-dir and --work-tree.
// Any stderr output is suppressed unless the command fails.
const runGit = async function(command: string[]): Promise<string> {
let stdout = '';
let stderr = '';
command = [`--git-dir=${repoGitDir}`, `--work-tree=${repoPath}`, ...command];
console.log('Running: git ' + command.join(' '));
const runGit = async function (command: string[]): Promise<string> {
let stdout = "";
let stderr = "";
command = [
`--git-dir=${repoGitDir}`,
`--work-tree=${repoPath}`,
...command,
];
console.log(`Running: git ${command.join(" ")}`);
try {
await new toolrunnner.ToolRunner('git', command, {
await new toolrunnner.ToolRunner("git", command, {
silent: true,
listeners: {
stdout: (data) => { stdout += data.toString(); },
stderr: (data) => { stderr += data.toString(); },
}
stdout: (data) => {
stdout += data.toString();
},
stderr: (data) => {
stderr += data.toString();
},
},
}).exec();
} catch (e) {
console.log('Command failed: git ' + command.join(' '));
console.log(`Command failed: git ${command.join(" ")}`);
process.stderr.write(stderr);
throw e;
}
@@ -47,25 +55,23 @@ test("checkoutExternalQueries", async t => {
};
fs.mkdirSync(repoPath, { recursive: true });
await runGit(['init', repoPath]);
await runGit(['config', 'user.email', 'test@github.com']);
await runGit(['config', 'user.name', 'Test Test']);
await runGit(["init", repoPath]);
await runGit(["config", "user.email", "test@github.com"]);
await runGit(["config", "user.name", "Test Test"]);
fs.writeFileSync(path.join(repoPath, 'a'), 'a content');
await runGit(['add', 'a']);
await runGit(['commit', '-m', 'commit1']);
fs.writeFileSync(path.join(repoPath, 'b'), 'b content');
await runGit(['add', 'b']);
await runGit(['commit', '-m', 'commit1']);
const commit1Sha = await runGit(['rev-parse', 'HEAD']);
fs.unlinkSync(path.join(repoPath, 'b'));
await runGit(['add', 'b']);
await runGit(['commit', '-m', 'commit2']);
const commit2Sha = await runGit(['rev-parse', 'HEAD']);
fs.writeFileSync(path.join(repoPath, "a"), "a content");
await runGit(["add", "a"]);
await runGit(["commit", "-m", "commit1"]);
fs.writeFileSync(path.join(repoPath, "b"), "b content");
await runGit(["add", "b"]);
await runGit(["commit", "-m", "commit1"]);
const commit1Sha = await runGit(["rev-parse", "HEAD"]);
fs.unlinkSync(path.join(repoPath, "b"));
await runGit(["add", "b"]);
await runGit(["commit", "-m", "commit2"]);
const commit2Sha = await runGit(["rev-parse", "HEAD"]);
// Checkout the first commit, which should contain 'a' and 'b'
t.false(fs.existsSync(path.join(tmpDir, repoName)));
@@ -74,13 +80,12 @@ test("checkoutExternalQueries", async t => {
commit1Sha,
`file://${testRepoBaseDir}`,
tmpDir,
getRunnerLogger(true));
getRunnerLogger(true)
);
t.true(fs.existsSync(path.join(tmpDir, repoName)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, 'a')));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, 'b')));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "a")));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "b")));
// Checkout the second commit as well, which should only contain 'a'
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
@@ -89,9 +94,10 @@ test("checkoutExternalQueries", async t => {
commit2Sha,
`file://${testRepoBaseDir}`,
tmpDir,
getRunnerLogger(true));
getRunnerLogger(true)
);
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, 'a')));
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, 'b')));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "a")));
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "b")));
});
});


@@ -1,8 +1,8 @@
import * as toolrunnner from '@actions/exec/lib/toolrunner';
import * as fs from 'fs';
import * as path from 'path';
import * as toolrunnner from "@actions/exec/lib/toolrunner";
import * as fs from "fs";
import * as path from "path";
import { Logger } from './logging';
import { Logger } from "./logging";
/**
* Check out the repository at the given ref, and return the directory of the checkout.
@@ -12,24 +12,31 @@ export async function checkoutExternalRepository(
ref: string,
githubUrl: string,
tempDir: string,
logger: Logger): Promise<string> {
logger.info('Checking out ' + repository);
logger: Logger
): Promise<string> {
logger.info(`Checking out ${repository}`);
const checkoutLocation = path.join(tempDir, repository, ref);
if (!checkoutLocation.startsWith(tempDir)) {
// this still permits locations that mess with sibling repositories in `tempDir`, but that is acceptable
throw new Error(`'${repository}@${ref}' is not a valid repository and reference.`);
throw new Error(
`'${repository}@${ref}' is not a valid repository and reference.`
);
}
if (!fs.existsSync(checkoutLocation)) {
const repoURL = githubUrl + '/' + repository;
await new toolrunnner.ToolRunner('git', ['clone', repoURL, checkoutLocation]).exec();
await new toolrunnner.ToolRunner('git', [
'--work-tree=' + checkoutLocation,
'--git-dir=' + checkoutLocation + '/.git',
'checkout', ref,
const repoURL = `${githubUrl}/${repository}`;
await new toolrunnner.ToolRunner("git", [
"clone",
repoURL,
checkoutLocation,
]).exec();
await new toolrunnner.ToolRunner("git", [
`--work-tree=${checkoutLocation}`,
`--git-dir=${checkoutLocation}/.git`,
"checkout",
ref,
]).exec();
}
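// Illustrative sketch only, not part of this lint-fix change, showing a typical call to
// the helper above; the repository name and ref are hypothetical.
//   const dir = await checkoutExternalRepository(
//     "octo-org/custom-queries", // repository in <owner>/<name> form
//     "main",                    // ref to check out
//     "https://github.com",      // githubUrl
//     tempDir,                   // checkout lands under <tempDir>/<repository>/<ref>
//     logger
//   );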

View File

@@ -1,17 +1,17 @@
import test from 'ava';
import test from "ava";
import * as ava from "ava";
import * as fs from 'fs';
import * as path from 'path';
import * as fs from "fs";
import * as path from "path";
import * as fingerprints from './fingerprints';
import { getRunnerLogger } from './logging';
import {setupTests} from './testing-utils';
import * as fingerprints from "./fingerprints";
import { getRunnerLogger } from "./logging";
import { setupTests } from "./testing-utils";
setupTests(test);
function testHash(t: ava.Assertions, input: string, expectedHashes: string[]) {
let index = 0;
let callback = function (lineNumber: number, hash: string) {
const callback = function (lineNumber: number, hash: string) {
t.is(lineNumber, index + 1);
t.is(hash, expectedHashes[index]);
index++;
@@ -20,187 +20,188 @@ function testHash(t: ava.Assertions, input: string, expectedHashes: string[]) {
t.is(index, input.split(/\r\n|\r|\n/).length);
}
test('hash', (t: ava.Assertions) => {
test("hash", (t: ava.Assertions) => {
// Try empty file
testHash(t, "", ["c129715d7a2bc9a3:1"]);
// Try various combinations of newline characters
testHash(
t,
" a\nb\n \t\tc\n d",
[
"271789c17abda88f:1",
"54703d4cd895b18:1",
"180aee12dab6264:1",
"a23a3dc5e078b07b:1"
]);
testHash(
t,
" hello; \t\nworld!!!\n\n\n \t\tGreetings\n End",
[
"8b7cf3e952e7aeb2:1",
"b1ae1287ec4718d9:1",
"bff680108adb0fcc:1",
"c6805c5e1288b612:1",
"b86d3392aea1be30:1",
"e6ceba753e1a442:1",
]);
testHash(
t,
" hello; \t\nworld!!!\n\n\n \t\tGreetings\n End\n",
[
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
"e20e36e16fcb0cc8:1",
"b3edc88f2938467e:1",
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
testHash(
t,
" hello; \t\nworld!!!\r\r\r \t\tGreetings\r End\r",
[
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
"e20e36e16fcb0cc8:1",
"b3edc88f2938467e:1",
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
testHash(
t,
" hello; \t\r\nworld!!!\r\n\r\n\r\n \t\tGreetings\r\n End\r\n",
[
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
"e20e36e16fcb0cc8:1",
"b3edc88f2938467e:1",
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
testHash(
t,
" hello; \t\nworld!!!\r\n\n\r \t\tGreetings\r End\r\n",
[
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
"e20e36e16fcb0cc8:1",
"b3edc88f2938467e:1",
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
testHash(t, " a\nb\n \t\tc\n d", [
"271789c17abda88f:1",
"54703d4cd895b18:1",
"180aee12dab6264:1",
"a23a3dc5e078b07b:1",
]);
testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End", [
"8b7cf3e952e7aeb2:1",
"b1ae1287ec4718d9:1",
"bff680108adb0fcc:1",
"c6805c5e1288b612:1",
"b86d3392aea1be30:1",
"e6ceba753e1a442:1",
]);
testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End\n", [
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
"e20e36e16fcb0cc8:1",
"b3edc88f2938467e:1",
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
testHash(t, " hello; \t\nworld!!!\r\r\r \t\tGreetings\r End\r", [
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
"e20e36e16fcb0cc8:1",
"b3edc88f2938467e:1",
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
testHash(t, " hello; \t\r\nworld!!!\r\n\r\n\r\n \t\tGreetings\r\n End\r\n", [
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
"e20e36e16fcb0cc8:1",
"b3edc88f2938467e:1",
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
testHash(t, " hello; \t\nworld!!!\r\n\n\r \t\tGreetings\r End\r\n", [
"e9496ae3ebfced30:1",
"fb7c023a8b9ccb3f:1",
"ce8ba1a563dcdaca:1",
"e20e36e16fcb0cc8:1",
"b3edc88f2938467e:1",
"c8e28b0b4002a3a0:1",
"c129715d7a2bc9a3:1",
]);
// Try repeating line that will generate identical hashes
testHash(
t,
"Lorem ipsum dolor sit amet.\n".repeat(10),
[
"a7f2ff13bc495cf2:1",
"a7f2ff13bc495cf2:2",
"a7f2ff13bc495cf2:3",
"a7f2ff13bc495cf2:4",
"a7f2ff13bc495cf2:5",
"a7f2ff13bc495cf2:6",
"a7f2ff1481e87703:1",
"a9cf91f7bbf1862b:1",
"55ec222b86bcae53:1",
"cc97dc7b1d7d8f7b:1",
"c129715d7a2bc9a3:1"
]);
testHash(t, "Lorem ipsum dolor sit amet.\n".repeat(10), [
"a7f2ff13bc495cf2:1",
"a7f2ff13bc495cf2:2",
"a7f2ff13bc495cf2:3",
"a7f2ff13bc495cf2:4",
"a7f2ff13bc495cf2:5",
"a7f2ff13bc495cf2:6",
"a7f2ff1481e87703:1",
"a9cf91f7bbf1862b:1",
"55ec222b86bcae53:1",
"cc97dc7b1d7d8f7b:1",
"c129715d7a2bc9a3:1",
]);
testHash(
t,
"x = 2\nx = 1\nprint(x)\nx = 3\nprint(x)\nx = 4\nprint(x)\n",
[
"e54938cc54b302f1:1",
"bb609acbe9138d60:1",
"1131fd5871777f34:1",
"5c482a0f8b35ea28:1",
"54517377da7028d2:1",
"2c644846cb18d53e:1",
"f1b89f20de0d133:1",
"c129715d7a2bc9a3:1"
]);
testHash(t, "x = 2\nx = 1\nprint(x)\nx = 3\nprint(x)\nx = 4\nprint(x)\n", [
"e54938cc54b302f1:1",
"bb609acbe9138d60:1",
"1131fd5871777f34:1",
"5c482a0f8b35ea28:1",
"54517377da7028d2:1",
"2c644846cb18d53e:1",
"f1b89f20de0d133:1",
"c129715d7a2bc9a3:1",
]);
});
function testResolveUriToFile(uri: any, index: any, artifactsURIs: any[]) {
const location = { "uri": uri, "index": index };
const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
return fingerprints.resolveUriToFile(location, artifacts, process.cwd(), getRunnerLogger(true));
const location = { uri, index };
const artifacts = artifactsURIs.map((uri) => ({ location: { uri } }));
return fingerprints.resolveUriToFile(
location,
artifacts,
process.cwd(),
getRunnerLogger(true)
);
}
test('resolveUriToFile', t => {
test("resolveUriToFile", (t) => {
// The resolveUriToFile method checks that the file exists and is in the right directory
// so we need to give it real files to look at. We will use this file as an example.
// For this to work we require the current working directory to be a parent, but this
// should generally always be the case so this is fine.
const cwd = process.cwd();
const filepath = __filename;
t.true(filepath.startsWith(cwd + '/'));
t.true(filepath.startsWith(`${cwd}/`));
const relativeFilepaht = filepath.substring(cwd.length + 1);
// Absolute paths are unmodified
t.is(testResolveUriToFile(filepath, undefined, []), filepath);
t.is(testResolveUriToFile('file://' + filepath, undefined, []), filepath);
t.is(testResolveUriToFile(`file://${filepath}`, undefined, []), filepath);
// Relative paths are made absolute
t.is(testResolveUriToFile(relativeFilepaht, undefined, []), filepath);
t.is(testResolveUriToFile('file://' + relativeFilepaht, undefined, []), filepath);
t.is(
testResolveUriToFile(`file://${relativeFilepaht}`, undefined, []),
filepath
);
// Absolute paths outside the src root are discarded
t.is(testResolveUriToFile('/src/foo/bar.js', undefined, []), undefined);
t.is(testResolveUriToFile('file:///src/foo/bar.js', undefined, []), undefined);
t.is(testResolveUriToFile("/src/foo/bar.js", undefined, []), undefined);
t.is(
testResolveUriToFile("file:///src/foo/bar.js", undefined, []),
undefined
);
// Other schemes are discarded
t.is(testResolveUriToFile('https://' + filepath, undefined, []), undefined);
t.is(testResolveUriToFile('ftp://' + filepath, undefined, []), undefined);
t.is(testResolveUriToFile(`https://${filepath}`, undefined, []), undefined);
t.is(testResolveUriToFile(`ftp://${filepath}`, undefined, []), undefined);
// Invalid URIs are discarded
t.is(testResolveUriToFile(1, undefined, []), undefined);
t.is(testResolveUriToFile(undefined, undefined, []), undefined);
// Non-existent files are discarded
t.is(testResolveUriToFile(filepath + '2', undefined, []), undefined);
t.is(testResolveUriToFile(`${filepath}2`, undefined, []), undefined);
// Index is resolved
t.is(testResolveUriToFile(undefined, 0, [filepath]), filepath);
t.is(testResolveUriToFile(undefined, 1, ['foo', filepath]), filepath);
t.is(testResolveUriToFile(undefined, 1, ["foo", filepath]), filepath);
// Invalid indexes are discarded
t.is(testResolveUriToFile(undefined, 1, [filepath]), undefined);
t.is(testResolveUriToFile(undefined, '0', [filepath]), undefined);
t.is(testResolveUriToFile(undefined, "0", [filepath]), undefined);
});
test('addFingerprints', t => {
test("addFingerprints", (t) => {
// Run an end-to-end test on a test file
let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.input.sarif').toString();
let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.expected.sarif').toString();
let input = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`)
.toString();
let expected = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.expected.sarif`)
.toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
const checkoutPath = path.normalize(__dirname + '/../src/testdata');
const checkoutPath = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, getRunnerLogger(true)), expected);
t.deepEqual(
fingerprints.addFingerprints(input, checkoutPath, getRunnerLogger(true)),
expected
);
});
test('missingRegions', t => {
test("missingRegions", (t) => {
// Run an end-to-end test on a test file
let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.input.sarif').toString();
let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.expected.sarif').toString();
let input = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`)
.toString();
let expected = fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.expected.sarif`)
.toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
const checkoutPath = path.normalize(__dirname + '/../src/testdata');
const checkoutPath = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(fingerprints.addFingerprints(input, checkoutPath, getRunnerLogger(true)), expected);
t.deepEqual(
fingerprints.addFingerprints(input, checkoutPath, getRunnerLogger(true)),
expected
);
});


@@ -1,12 +1,12 @@
import * as fs from 'fs';
import Long from 'long';
import * as fs from "fs";
import Long from "long";
import { Logger } from './logging';
import { Logger } from "./logging";
const tab = '\t'.charCodeAt(0);
const space = ' '.charCodeAt(0);
const lf = '\n'.charCodeAt(0);
const cr = '\r'.charCodeAt(0);
const tab = "\t".charCodeAt(0);
const space = " ".charCodeAt(0);
const lf = "\n".charCodeAt(0);
const cr = "\r".charCodeAt(0);
const BLOCK_SIZE = 100;
const MOD = Long.fromInt(37); // L
@@ -46,7 +46,7 @@ export function hash(callback: hashCallback, input: string) {
// The current hash value, updated as we read each character
let hash = Long.ZERO;
let firstMod = computeFirstMod();
const firstMod = computeFirstMod();
// The current index in the window, will wrap around to zero when we reach BLOCK_SIZE
let index = 0;
@@ -62,12 +62,12 @@ export function hash(callback: hashCallback, input: string) {
// Output the current hash and line number to the callback function
const outputHash = function () {
let hashValue = hash.toUnsigned().toString(16);
const hashValue = hash.toUnsigned().toString(16);
if (!hashCounts[hashValue]) {
hashCounts[hashValue] = 0;
}
hashCounts[hashValue]++;
callback(lineNumbers[index], hashValue + ":" + hashCounts[hashValue]);
callback(lineNumbers[index], `${hashValue}:${hashCounts[hashValue]}`);
lineNumbers[index] = -1;
};
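// Illustrative sketch only, not part of this lint-fix change: hash() calls the callback
// once per line of input with a fingerprint of the form "<hex hash>:<occurrence count>",
// where the count disambiguates otherwise identical lines.
//   hash((lineNumber, lineHash) => console.log(`${lineNumber}: ${lineHash}`), "");
//   // For the empty input above the callback fires once with "1: c129715d7a2bc9a3:1",
//   // matching the first case of the "hash" test earlier in this diff.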
@@ -125,7 +125,11 @@ export function hash(callback: hashCallback, input: string) {
// Generate a hash callback function that updates the given result in-place
// when it receives a hash for the correct line number. Ignores hashes for other lines.
function locationUpdateCallback(result: any, location: any, logger: Logger): hashCallback {
function locationUpdateCallback(
result: any,
location: any,
logger: Logger
): hashCallback {
let locationStartLine = location.physicalLocation?.region?.startLine;
if (locationStartLine === undefined) {
// We expect the region section to be present, but it can be absent if the
@@ -142,17 +146,17 @@ function locationUpdateCallback(result: any, location: any, logger: Logger): has
if (!result.partialFingerprints) {
result.partialFingerprints = {};
}
const existingFingerprint = result.partialFingerprints.primaryLocationLineHash;
const existingFingerprint =
result.partialFingerprints.primaryLocationLineHash;
// If the hash doesn't match the existing fingerprint then
// output a warning and don't overwrite it.
if (!existingFingerprint) {
result.partialFingerprints.primaryLocationLineHash = hash;
} else if (existingFingerprint !== hash) {
logger.warning('Calculated fingerprint of ' + hash +
' for file ' + location.physicalLocation.artifactLocation.uri +
' line ' + lineNumber +
', but found existing inconsistent fingerprint value ' + existingFingerprint);
logger.warning(
`Calculated fingerprint of ${hash} for file ${location.physicalLocation.artifactLocation.uri} line ${lineNumber}, but found existing inconsistent fingerprint value ${existingFingerprint}`
);
}
};
}
@@ -165,14 +169,16 @@ export function resolveUriToFile(
location: any,
artifacts: any[],
checkoutPath: string,
logger: Logger): string | undefined {
logger: Logger
): string | undefined {
// This may be referencing an artifact
if (!location.uri && location.index !== undefined) {
if (typeof location.index !== 'number' ||
if (
typeof location.index !== "number" ||
location.index < 0 ||
location.index >= artifacts.length ||
typeof artifacts[location.index].location !== 'object') {
typeof artifacts[location.index].location !== "object"
) {
logger.debug(`Ignoring location as index "${location.index}" is invalid`);
return undefined;
}
@@ -180,33 +186,37 @@ export function resolveUriToFile(
}
// Get the URI and decode
if (typeof location.uri !== 'string') {
if (typeof location.uri !== "string") {
logger.debug(`Ignoring location as URI "${location.uri}" is invalid`);
return undefined;
}
let uri = decodeURIComponent(location.uri);
// Remove a file scheme, and abort if the scheme is anything else
const fileUriPrefix = 'file://';
const fileUriPrefix = "file://";
if (uri.startsWith(fileUriPrefix)) {
uri = uri.substring(fileUriPrefix.length);
}
if (uri.indexOf('://') !== -1) {
logger.debug(`Ignoring location URI "${uri}" as the scheme is not recognised`);
if (uri.indexOf("://") !== -1) {
logger.debug(
`Ignoring location URI "${uri}" as the scheme is not recognised`
);
return undefined;
}
// Discard any absolute paths that aren't in the src root
const srcRootPrefix = checkoutPath + '/';
if (uri.startsWith('/') && !uri.startsWith(srcRootPrefix)) {
logger.debug(`Ignoring location URI "${uri}" as it is outside of the src root`);
const srcRootPrefix = `${checkoutPath}/`;
if (uri.startsWith("/") && !uri.startsWith(srcRootPrefix)) {
logger.debug(
`Ignoring location URI "${uri}" as it is outside of the src root`
);
return undefined;
}
// Just assume a relative path is relative to the src root.
// This is not necessarily true but should be a good approximation
// and here we likely want to err on the side of handling more cases.
if (!uri.startsWith('/')) {
if (!uri.startsWith("/")) {
uri = srcRootPrefix + uri;
}
@@ -221,21 +231,29 @@ export function resolveUriToFile(
// Compute fingerprints for results in the given sarif file
// and return the updated sarif file contents.
export function addFingerprints(sarifContents: string, checkoutPath: string, logger: Logger): string {
let sarif = JSON.parse(sarifContents);
export function addFingerprints(
sarifContents: string,
checkoutPath: string,
logger: Logger
): string {
const sarif = JSON.parse(sarifContents);
// Gather together results for the same file and construct
// callbacks to accept hashes for that file and update the location
const callbacksByFile: { [filename: string]: hashCallback[] } = {};
for (const run of sarif.runs || []) {
// We may need the list of artifacts to resolve against
let artifacts = run.artifacts || [];
const artifacts = run.artifacts || [];
for (const result of run.results || []) {
// Check the primary location is defined correctly and is in the src root
const primaryLocation = (result.locations || [])[0];
if (!primaryLocation?.physicalLocation?.artifactLocation) {
logger.debug(`Unable to compute fingerprint for invalid location: ${JSON.stringify(primaryLocation)}`);
logger.debug(
`Unable to compute fingerprint for invalid location: ${JSON.stringify(
primaryLocation
)}`
);
continue;
}
@@ -243,14 +261,17 @@ export function addFingerprints(sarifContents: string, checkoutPath: string, log
primaryLocation.physicalLocation.artifactLocation,
artifacts,
checkoutPath,
logger);
logger
);
if (!filepath) {
continue;
}
if (!callbacksByFile[filepath]) {
callbacksByFile[filepath] = [];
}
callbacksByFile[filepath].push(locationUpdateCallback(result, primaryLocation, logger));
callbacksByFile[filepath].push(
locationUpdateCallback(result, primaryLocation, logger)
);
}
}
@@ -258,7 +279,7 @@ export function addFingerprints(sarifContents: string, checkoutPath: string, log
Object.entries(callbacksByFile).forEach(([filepath, callbacks]) => {
// A callback that forwards the hash to all other callbacks for that file
const teeCallback = function (lineNumber: number, hash: string) {
Object.values(callbacks).forEach(c => c(lineNumber, hash));
Object.values(callbacks).forEach((c) => c(lineNumber, hash));
};
const fileContents = fs.readFileSync(filepath).toString();
hash(teeCallback, fileContents);


@@ -1,11 +1,11 @@
import * as core from '@actions/core';
import * as core from "@actions/core";
import { CodeQL } from './codeql';
import * as configUtils from './config-utils';
import { initCodeQL, initConfig, injectWindowsTracer, runInit } from './init';
import { getActionsLogger } from './logging';
import { parseRepositoryNwo } from './repository';
import * as util from './util';
import { CodeQL } from "./codeql";
import * as configUtils from "./config-utils";
import { initCodeQL, initConfig, injectWindowsTracer, runInit } from "./init";
import { getActionsLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import * as util from "./util";
interface InitSuccessStatusReport extends util.StatusReportBase {
// Comma-separated list of languages that analysis was run for
@@ -23,24 +23,39 @@ interface InitSuccessStatusReport extends util.StatusReportBase {
queries: string;
}
async function sendSuccessStatusReport(startedAt: Date, config: configUtils.Config) {
const statusReportBase = await util.createStatusReportBase('init', 'success', startedAt);
async function sendSuccessStatusReport(
startedAt: Date,
config: configUtils.Config
) {
const statusReportBase = await util.createStatusReportBase(
"init",
"success",
startedAt
);
const languages = config.languages.join(',');
const workflowLanguages = core.getInput('languages', { required: false });
const paths = (config.originalUserInput.paths || []).join(',');
const pathsIgnore = (config.originalUserInput['paths-ignore'] || []).join(',');
const disableDefaultQueries = config.originalUserInput['disable-default-queries'] ? languages : '';
const queries = (config.originalUserInput.queries || []).map(q => q.uses).join(',');
const languages = config.languages.join(",");
const workflowLanguages = core.getInput("languages", { required: false });
const paths = (config.originalUserInput.paths || []).join(",");
const pathsIgnore = (config.originalUserInput["paths-ignore"] || []).join(
","
);
const disableDefaultQueries = config.originalUserInput[
"disable-default-queries"
]
? languages
: "";
const queries = (config.originalUserInput.queries || [])
.map((q) => q.uses)
.join(",");
const statusReport: InitSuccessStatusReport = {
...statusReportBase,
languages: languages,
languages,
workflow_languages: workflowLanguages,
paths: paths,
paths,
paths_ignore: pathsIgnore,
disable_default_queries: disableDefaultQueries,
queries: queries,
queries,
};
await util.sendStatusReport(statusReport);
@@ -54,75 +69,94 @@ async function run() {
try {
util.prepareLocalRunEnvironment();
if (!await util.sendStatusReport(await util.createStatusReportBase('init', 'starting', startedAt), true)) {
if (
!(await util.sendStatusReport(
await util.createStatusReportBase("init", "starting", startedAt),
true
))
) {
return;
}
codeql = await initCodeQL(
core.getInput('tools'),
core.getInput('token'),
util.getRequiredEnvParam('GITHUB_SERVER_URL'),
util.getRequiredEnvParam('RUNNER_TEMP'),
util.getRequiredEnvParam('RUNNER_TOOL_CACHE'),
'actions',
logger);
core.getInput("tools"),
core.getInput("token"),
util.getRequiredEnvParam("GITHUB_SERVER_URL"),
util.getRequiredEnvParam("RUNNER_TEMP"),
util.getRequiredEnvParam("RUNNER_TOOL_CACHE"),
"actions",
logger
);
config = await initConfig(
core.getInput('languages'),
core.getInput('queries'),
core.getInput('config-file'),
parseRepositoryNwo(util.getRequiredEnvParam('GITHUB_REPOSITORY')),
util.getRequiredEnvParam('RUNNER_TEMP'),
util.getRequiredEnvParam('RUNNER_TOOL_CACHE'),
core.getInput("languages"),
core.getInput("queries"),
core.getInput("config-file"),
parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")),
util.getRequiredEnvParam("RUNNER_TEMP"),
util.getRequiredEnvParam("RUNNER_TOOL_CACHE"),
codeql,
util.getRequiredEnvParam('GITHUB_WORKSPACE'),
core.getInput('token'),
util.getRequiredEnvParam('GITHUB_SERVER_URL'),
logger);
util.getRequiredEnvParam("GITHUB_WORKSPACE"),
core.getInput("token"),
util.getRequiredEnvParam("GITHUB_SERVER_URL"),
logger
);
} catch (e) {
core.setFailed(e.message);
console.log(e);
await util.sendStatusReport(await util.createStatusReportBase('init', 'aborted', startedAt, e.message));
await util.sendStatusReport(
await util.createStatusReportBase("init", "aborted", startedAt, e.message)
);
return;
}
try {
// Forward Go flags
const goFlags = process.env['GOFLAGS'];
const goFlags = process.env["GOFLAGS"];
if (goFlags) {
core.exportVariable('GOFLAGS', goFlags);
core.warning("Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action.");
core.exportVariable("GOFLAGS", goFlags);
core.warning(
"Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action."
);
}
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
const codeqlRam = process.env['CODEQL_RAM'] || '6500';
core.exportVariable('CODEQL_RAM', codeqlRam);
const codeqlRam = process.env["CODEQL_RAM"] || "6500";
core.exportVariable("CODEQL_RAM", codeqlRam);
const tracerConfig = await runInit(codeql, config);
if (tracerConfig !== undefined) {
Object.entries(tracerConfig.env).forEach(([key, value]) => core.exportVariable(key, value));
Object.entries(tracerConfig.env).forEach(([key, value]) =>
core.exportVariable(key, value)
);
if (process.platform === 'win32') {
await injectWindowsTracer('Runner.Worker.exe', undefined, config, codeql, tracerConfig);
if (process.platform === "win32") {
await injectWindowsTracer(
"Runner.Worker.exe",
undefined,
config,
codeql,
tracerConfig
);
}
}
} catch (error) {
core.setFailed(error.message);
console.log(error);
await util.sendStatusReport(await util.createStatusReportBase(
'init',
'failure',
startedAt,
error.message,
error.stack));
await util.sendStatusReport(
await util.createStatusReportBase(
"init",
"failure",
startedAt,
error.message,
error.stack
)
);
return;
}
await sendSuccessStatusReport(startedAt, config);
}
run().catch(e => {
core.setFailed("init action failed: " + e);
run().catch((e) => {
core.setFailed(`init action failed: ${e}`);
console.log(e);
});


@@ -1,14 +1,14 @@
import * as toolrunnner from '@actions/exec/lib/toolrunner';
import * as fs from 'fs';
import * as path from 'path';
import * as toolrunnner from "@actions/exec/lib/toolrunner";
import * as fs from "fs";
import * as path from "path";
import * as analysisPaths from './analysis-paths';
import { CodeQL, setupCodeQL } from './codeql';
import * as configUtils from './config-utils';
import { Logger } from './logging';
import { RepositoryNwo } from './repository';
import { getCombinedTracerConfig, TracerConfig } from './tracer-config';
import * as util from './util';
import * as analysisPaths from "./analysis-paths";
import { CodeQL, setupCodeQL } from "./codeql";
import * as configUtils from "./config-utils";
import { Logger } from "./logging";
import { RepositoryNwo } from "./repository";
import { TracerConfig, getCombinedTracerConfig } from "./tracer-config";
import * as util from "./util";
export async function initCodeQL(
codeqlURL: string | undefined,
@@ -17,9 +17,9 @@ export async function initCodeQL(
tempDir: string,
toolsDir: string,
mode: util.Mode,
logger: Logger): Promise<CodeQL> {
logger.startGroup('Setup CodeQL tools');
logger: Logger
): Promise<CodeQL> {
logger.startGroup("Setup CodeQL tools");
const codeql = await setupCodeQL(
codeqlURL,
githubAuth,
@@ -27,7 +27,8 @@ export async function initCodeQL(
tempDir,
toolsDir,
mode,
logger);
logger
);
await codeql.printVersion();
logger.endGroup();
return codeql;
@@ -44,9 +45,9 @@ export async function initConfig(
checkoutPath: string,
githubAuth: string,
githubUrl: string,
logger: Logger): Promise<configUtils.Config> {
logger.startGroup('Load language configuration');
logger: Logger
): Promise<configUtils.Config> {
logger.startGroup("Load language configuration");
const config = await configUtils.initConfig(
languagesInput,
queriesInput,
@@ -58,7 +59,8 @@ export async function initConfig(
checkoutPath,
githubAuth,
githubUrl,
logger);
logger
);
analysisPaths.printPathFiltersWarning(config, logger);
logger.endGroup();
return config;
@@ -66,16 +68,20 @@ export async function initConfig(
export async function runInit(
codeql: CodeQL,
config: configUtils.Config): Promise<TracerConfig | undefined> {
config: configUtils.Config
): Promise<TracerConfig | undefined> {
const sourceRoot = path.resolve();
fs.mkdirSync(util.getCodeQLDatabasesDir(config.tempDir), { recursive: true });
// TODO: replace this code once CodeQL supports multi-language tracing
for (let language of config.languages) {
for (const language of config.languages) {
// Init language database
await codeql.databaseInit(util.getCodeQLDatabasePath(config.tempDir, language), language, sourceRoot);
await codeql.databaseInit(
util.getCodeQLDatabasePath(config.tempDir, language),
language,
sourceRoot
);
}
return await getCombinedTracerConfig(config, codeql);
@@ -91,8 +97,8 @@ export async function injectWindowsTracer(
processLevel: number | undefined,
config: configUtils.Config,
codeql: CodeQL,
tracerConfig: TracerConfig) {
tracerConfig: TracerConfig
) {
let script: string;
if (processName !== undefined) {
script = `
@@ -155,15 +161,23 @@ export async function injectWindowsTracer(
Invoke-Expression "&$tracer --inject=$id"`;
}
const injectTracerPath = path.join(config.tempDir, 'inject-tracer.ps1');
const injectTracerPath = path.join(config.tempDir, "inject-tracer.ps1");
fs.writeFileSync(injectTracerPath, script);
await new toolrunnner.ToolRunner(
'powershell',
"powershell",
[
'-ExecutionPolicy', 'Bypass',
'-file', injectTracerPath,
path.resolve(path.dirname(codeql.getPath()), 'tools', 'win64', 'tracer.exe'),
"-ExecutionPolicy",
"Bypass",
"-file",
injectTracerPath,
path.resolve(
path.dirname(codeql.getPath()),
"tools",
"win64",
"tracer.exe"
),
],
{ env: { 'ODASA_TRACER_CONFIGURATION': tracerConfig.spec } }).exec();
{ env: { ODASA_TRACER_CONFIGURATION: tracerConfig.spec } }
).exec();
}


@@ -1,32 +1,37 @@
import test from 'ava';
import test from "ava";
import {isScannedLanguage, isTracedLanguage, Language, parseLanguage} from './languages';
import {setupTests} from './testing-utils';
import {
Language,
isScannedLanguage,
isTracedLanguage,
parseLanguage,
} from "./languages";
import { setupTests } from "./testing-utils";
setupTests(test);
test('parseLanguage', async t => {
test("parseLanguage", async (t) => {
// Exact matches
t.deepEqual(parseLanguage('csharp'), Language.csharp);
t.deepEqual(parseLanguage('cpp'), Language.cpp);
t.deepEqual(parseLanguage('go'), Language.go);
t.deepEqual(parseLanguage('java'), Language.java);
t.deepEqual(parseLanguage('javascript'), Language.javascript);
t.deepEqual(parseLanguage('python'), Language.python);
t.deepEqual(parseLanguage("csharp"), Language.csharp);
t.deepEqual(parseLanguage("cpp"), Language.cpp);
t.deepEqual(parseLanguage("go"), Language.go);
t.deepEqual(parseLanguage("java"), Language.java);
t.deepEqual(parseLanguage("javascript"), Language.javascript);
t.deepEqual(parseLanguage("python"), Language.python);
// Aliases
t.deepEqual(parseLanguage('c'), Language.cpp);
t.deepEqual(parseLanguage('c++'), Language.cpp);
t.deepEqual(parseLanguage('c#'), Language.csharp);
t.deepEqual(parseLanguage('typescript'), Language.javascript);
t.deepEqual(parseLanguage("c"), Language.cpp);
t.deepEqual(parseLanguage("c++"), Language.cpp);
t.deepEqual(parseLanguage("c#"), Language.csharp);
t.deepEqual(parseLanguage("typescript"), Language.javascript);
// Not matches
t.deepEqual(parseLanguage('foo'), undefined);
t.deepEqual(parseLanguage(' '), undefined);
t.deepEqual(parseLanguage(''), undefined);
t.deepEqual(parseLanguage("foo"), undefined);
t.deepEqual(parseLanguage(" "), undefined);
t.deepEqual(parseLanguage(""), undefined);
});
test('isTracedLanguage', async t => {
test("isTracedLanguage", async (t) => {
t.true(isTracedLanguage(Language.cpp));
t.true(isTracedLanguage(Language.java));
t.true(isTracedLanguage(Language.csharp));
@@ -36,7 +41,7 @@ test('isTracedLanguage', async t => {
t.false(isTracedLanguage(Language.python));
});
test('isScannedLanguage', async t => {
test("isScannedLanguage", async (t) => {
t.false(isScannedLanguage(Language.cpp));
t.false(isScannedLanguage(Language.java));
t.false(isScannedLanguage(Language.csharp));


@@ -1,19 +1,19 @@
// All the languages supported by CodeQL
export enum Language {
csharp = 'csharp',
cpp = 'cpp',
go = 'go',
java = 'java',
javascript = 'javascript',
python = 'python',
csharp = "csharp",
cpp = "cpp",
go = "go",
java = "java",
javascript = "javascript",
python = "python",
}
// Additional names for languages
const LANGUAGE_ALIASES: {[lang: string]: Language} = {
'c': Language.cpp,
'c++': Language.cpp,
'c#': Language.csharp,
'typescript': Language.javascript,
const LANGUAGE_ALIASES: { [lang: string]: Language } = {
c: Language.cpp,
"c++": Language.cpp,
"c#": Language.csharp,
typescript: Language.javascript,
};
// Translate from user input or GitHub's API names for languages to CodeQL's names for languages
@@ -34,9 +34,8 @@ export function parseLanguage(language: string): Language | undefined {
return undefined;
}
export function isTracedLanguage(language: Language): boolean {
return ['cpp', 'java', 'csharp'].includes(language);
return ["cpp", "java", "csharp"].includes(language);
}
export function isScannedLanguage(language: Language): boolean {


@@ -1,4 +1,4 @@
import * as core from '@actions/core';
import * as core from "@actions/core";
export interface Logger {
debug: (message: string) => void;


@@ -5,7 +5,7 @@ export interface RepositoryNwo {
}
export function parseRepositoryNwo(input: string): RepositoryNwo {
const parts = input.split('/');
const parts = input.split("/");
if (parts.length !== 2) {
throw new Error(`"${input}" is not a valid repository name`);
}


@@ -1,21 +1,21 @@
import { Command } from 'commander';
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import { Command } from "commander";
import * as fs from "fs";
import * as os from "os";
import * as path from "path";
import { runAnalyze } from './analyze';
import { determineAutobuildLanguage, runAutobuild } from './autobuild';
import { CodeQL, getCodeQL } from './codeql';
import { Config, getConfig } from './config-utils';
import { initCodeQL, initConfig, injectWindowsTracer, runInit } from './init';
import { Language, parseLanguage } from './languages';
import { getRunnerLogger } from './logging';
import { parseRepositoryNwo } from './repository';
import * as upload_lib from './upload-lib';
import { getAddSnippetsFlag, getMemoryFlag, getThreadsFlag } from './util';
import { runAnalyze } from "./analyze";
import { determineAutobuildLanguage, runAutobuild } from "./autobuild";
import { CodeQL, getCodeQL } from "./codeql";
import { Config, getConfig } from "./config-utils";
import { initCodeQL, initConfig, injectWindowsTracer, runInit } from "./init";
import { Language, parseLanguage } from "./languages";
import { getRunnerLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import * as upload_lib from "./upload-lib";
import { getAddSnippetsFlag, getMemoryFlag, getThreadsFlag } from "./util";
const program = new Command();
program.version('0.0.1');
program.version("0.0.1");
function parseGithubUrl(inputUrl: string): string {
try {
@@ -23,24 +23,23 @@ function parseGithubUrl(inputUrl: string): string {
// If we detect this is trying to be to github.com
// then return with a fixed canonical URL.
if (url.hostname === 'github.com' || url.hostname === 'api.github.com') {
return 'https://github.com';
if (url.hostname === "github.com" || url.hostname === "api.github.com") {
return "https://github.com";
}
// Remove the API prefix if it's present
if (url.pathname.indexOf('/api/v3') !== -1) {
url.pathname = url.pathname.substring(0, url.pathname.indexOf('/api/v3'));
if (url.pathname.indexOf("/api/v3") !== -1) {
url.pathname = url.pathname.substring(0, url.pathname.indexOf("/api/v3"));
}
return url.toString();
} catch (e) {
throw new Error(`"${inputUrl}" is not a valid URL`);
}
}
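// Illustrative sketch only, not part of this lint-fix change, assuming the standard URL
// parsing at the top of the function (outside this hunk); the GHES hostname is hypothetical.
//   parseGithubUrl("https://api.github.com")          // -> "https://github.com"
//   parseGithubUrl("https://ghe.example.com/api/v3")  // -> "https://ghe.example.com/"
//   parseGithubUrl("not a url")                       // throws '"not a url" is not a valid URL'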
function getTempDir(userInput: string | undefined): string {
const tempDir = path.join(userInput || process.cwd(), 'codeql-runner');
const tempDir = path.join(userInput || process.cwd(), "codeql-runner");
if (!fs.existsSync(tempDir)) {
fs.mkdirSync(tempDir, { recursive: true });
}
@@ -48,38 +47,38 @@ function getTempDir(userInput: string | undefined): string {
}
function getToolsDir(userInput: string | undefined): string {
const toolsDir = userInput || path.join(os.homedir(), 'codeql-runner-tools');
const toolsDir = userInput || path.join(os.homedir(), "codeql-runner-tools");
if (!fs.existsSync(toolsDir)) {
fs.mkdirSync(toolsDir, { recursive: true });
}
return toolsDir;
}
const codeqlEnvJsonFilename = 'codeql-env.json';
const codeqlEnvJsonFilename = "codeql-env.json";
// Imports the environment from codeqlEnvJsonFilename if not already present
function importTracerEnvironment(config: Config) {
if (!('ODASA_TRACER_CONFIGURATION' in process.env)) {
if (!("ODASA_TRACER_CONFIGURATION" in process.env)) {
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
const env = JSON.parse(fs.readFileSync(jsonEnvFile).toString('utf-8'));
Object.keys(env).forEach(key => process.env[key] = env[key]);
const env = JSON.parse(fs.readFileSync(jsonEnvFile).toString("utf-8"));
Object.keys(env).forEach((key) => (process.env[key] = env[key]));
}
}
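// Illustrative sketch only, not part of this lint-fix change: codeql-env.json is the flat
// name -> value map written by the init command further down in this diff via
// fs.writeFileSync(jsonEnvFile, JSON.stringify(tracerConfig.env)), so importing it simply
// copies each entry onto process.env. A rough, hypothetical example of its contents:
//   { "ODASA_TRACER_CONFIGURATION": "<path to tracer spec>", ... }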
// Allow the user to specify refs in full refs/heads/branch format
// or just the short branch name and prepend "refs/heads/" to it.
function parseRef(userInput: string): string {
if (userInput.startsWith('refs/')) {
if (userInput.startsWith("refs/")) {
return userInput;
} else {
return 'refs/heads/' + userInput;
return `refs/heads/${userInput}`;
}
}
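// Illustrative sketch only, not part of this lint-fix change; the ref values are hypothetical.
//   parseRef("main")                // -> "refs/heads/main"
//   parseRef("refs/pull/123/merge") // -> returned unchanged, already a fully qualified ref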
// Parses the --trace-process-name arg from process.argv, or returns undefined
function parseTraceProcessName(): string | undefined {
for (let i = 0; i < process.argv.length - 1; i++) {
if (process.argv[i] === '--trace-process-name') {
if (process.argv[i] === "--trace-process-name") {
return process.argv[i + 1];
}
}
@@ -89,7 +88,7 @@ function parseTraceProcessName(): string | undefined {
// Parses the --trace-process-level arg from process.argv, or returns undefined
function parseTraceProcessLevel(): number | undefined {
for (let i = 0; i < process.argv.length - 1; i++) {
if (process.argv[i] === '--trace-process-level') {
if (process.argv[i] === "--trace-process-level") {
const v = parseInt(process.argv[i + 1], 10);
return isNaN(v) ? undefined : v;
}
@@ -112,19 +111,40 @@ interface InitArgs {
}
program
.command('init')
.description('Initializes CodeQL')
.requiredOption('--repository <repository>', 'Repository name. (Required)')
.requiredOption('--github-url <url>', 'URL of GitHub instance. (Required)')
.requiredOption('--github-auth <auth>', 'GitHub Apps token or personal access token. (Required)')
.option('--languages <languages>', 'Comma-separated list of languages to analyze. Otherwise detects and analyzes all supported languages from the repo.')
.option('--queries <queries>', 'Comma-separated list of additional queries to run. This overrides the same setting in a configuration file.')
.option('--config-file <file>', 'Path to config file.')
.option('--codeql-path <path>', 'Path to a copy of the CodeQL CLI executable to use. Otherwise downloads a copy.')
.option('--temp-dir <dir>', 'Directory to use for temporary files. Default is "./codeql-runner".')
.option('--tools-dir <dir>', 'Directory to use for CodeQL tools and other files to store between runs. Default is a subdirectory of the home directory.')
.option('--checkout-path <path>', 'Checkout path. Default is the current working directory.')
.option('--debug', 'Print more verbose output', false)
.command("init")
.description("Initializes CodeQL")
.requiredOption("--repository <repository>", "Repository name. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.requiredOption(
"--github-auth <auth>",
"GitHub Apps token or personal access token. (Required)"
)
.option(
"--languages <languages>",
"Comma-separated list of languages to analyze. Otherwise detects and analyzes all supported languages from the repo."
)
.option(
"--queries <queries>",
"Comma-separated list of additional queries to run. This overrides the same setting in a configuration file."
)
.option("--config-file <file>", "Path to config file.")
.option(
"--codeql-path <path>",
"Path to a copy of the CodeQL CLI executable to use. Otherwise downloads a copy."
)
.option(
"--temp-dir <dir>",
'Directory to use for temporary files. Default is "./codeql-runner".'
)
.option(
"--tools-dir <dir>",
"Directory to use for CodeQL tools and other files to store between runs. Default is a subdirectory of the home directory."
)
.option(
"--checkout-path <path>",
"Checkout path. Default is the current working directory."
)
.option("--debug", "Print more verbose output", false)
// This prevents a message like: error: unknown option '--trace-process-level'
// Remove this if commander.js starts supporting hidden options.
.allowUnknownOption()
@@ -149,8 +169,9 @@ program
parseGithubUrl(cmd.githubUrl),
tempDir,
toolsDir,
'runner',
logger);
"runner",
logger
);
}
const config = await initConfig(
@@ -164,60 +185,64 @@ program
cmd.checkoutPath || process.cwd(),
cmd.githubAuth,
parseGithubUrl(cmd.githubUrl),
logger);
logger
);
const tracerConfig = await runInit(codeql, config);
if (tracerConfig === undefined) {
return;
}
if (process.platform === 'win32') {
if (process.platform === "win32") {
await injectWindowsTracer(
parseTraceProcessName(),
parseTraceProcessLevel(),
config,
codeql,
tracerConfig);
tracerConfig
);
}
// Always output a JSON file of the env that can be consumed programmatically
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
fs.writeFileSync(jsonEnvFile, JSON.stringify(tracerConfig.env));
if (process.platform === 'win32') {
const batEnvFile = path.join(config.tempDir, 'codeql-env.bat');
if (process.platform === "win32") {
const batEnvFile = path.join(config.tempDir, "codeql-env.bat");
const batEnvFileContents = Object.entries(tracerConfig.env)
.map(([key, value]) => `Set ${key}=${value}`)
.join('\n');
.join("\n");
fs.writeFileSync(batEnvFile, batEnvFileContents);
const powershellEnvFile = path.join(config.tempDir, 'codeql-env.sh');
const powershellEnvFile = path.join(config.tempDir, "codeql-env.sh");
const powershellEnvFileContents = Object.entries(tracerConfig.env)
.map(([key, value]) => `$env:${key}="${value}"`)
.join('\n');
.join("\n");
fs.writeFileSync(powershellEnvFile, powershellEnvFileContents);
logger.info(`\nCodeQL environment output to "${jsonEnvFile}", "${batEnvFile}" and "${powershellEnvFile}". ` +
`Please export these variables to future processes so the build can be traced. ` +
`If using cmd/batch run "call ${batEnvFile}" ` +
`or if using PowerShell run "cat ${powershellEnvFile} | Invoke-Expression".`);
logger.info(
`\nCodeQL environment output to "${jsonEnvFile}", "${batEnvFile}" and "${powershellEnvFile}". ` +
`Please export these variables to future processes so the build can be traced. ` +
`If using cmd/batch run "call ${batEnvFile}" ` +
`or if using PowerShell run "cat ${powershellEnvFile} | Invoke-Expression".`
);
} else {
// Assume that anything that's not windows is using a unix-style shell
const shEnvFile = path.join(config.tempDir, 'codeql-env.sh');
const shEnvFile = path.join(config.tempDir, "codeql-env.sh");
const shEnvFileContents = Object.entries(tracerConfig.env)
// Some vars contain ${LIB} that we do not want to be expanded when executing this script
.map(([key, value]) => `export ${key}="${value.replace('$', '\\$')}"`)
.join('\n');
.map(([key, value]) => `export ${key}="${value.replace("$", "\\$")}"`)
.join("\n");
fs.writeFileSync(shEnvFile, shEnvFileContents);
logger.info(`\nCodeQL environment output to "${jsonEnvFile}" and "${shEnvFile}". ` +
`Please export these variables to future processes so the build can be traced, ` +
`for example by running ". ${shEnvFile}".`);
logger.info(
`\nCodeQL environment output to "${jsonEnvFile}" and "${shEnvFile}". ` +
`Please export these variables to future processes so the build can be traced, ` +
`for example by running ". ${shEnvFile}".`
);
}
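// Illustrative usage (an assumption about how this is consumed, not code from
// this commit): with the default temp dir, a later build step would run
// ". ./codeql-runner/codeql-env.sh && <your build command>" so that the tracer
// variables written above are visible to the compiler processes it spawns.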
} catch (e) {
logger.error('Init failed');
logger.error("Init failed");
logger.error(e);
process.exitCode = 1;
}
@@ -230,26 +255,38 @@ interface AutobuildArgs {
}
program
.command('autobuild')
.description('Attempts to automatically build code')
.option('--language <language>', 'The language to build. Otherwise will detect the dominant compiled language.')
.option('--temp-dir <dir>', 'Directory to use for temporary files. Default is "./codeql-runner".')
.option('--debug', 'Print more verbose output', false)
.command("autobuild")
.description("Attempts to automatically build code")
.option(
"--language <language>",
"The language to build. Otherwise will detect the dominant compiled language."
)
.option(
"--temp-dir <dir>",
'Directory to use for temporary files. Default is "./codeql-runner".'
)
.option("--debug", "Print more verbose output", false)
.action(async (cmd: AutobuildArgs) => {
const logger = getRunnerLogger(cmd.debug);
try {
const config = await getConfig(getTempDir(cmd.tempDir), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. " +
"Was the 'init' command run with the same '--temp-dir' argument as this command.");
throw new Error(
"Config file could not be found at expected location. " +
"Was the 'init' command run with the same '--temp-dir' argument as this command."
);
}
importTracerEnvironment(config);
let language: Language | undefined = undefined;
if (cmd.language !== undefined) {
language = parseLanguage(cmd.language);
if (language === undefined || !config.languages.includes(language)) {
throw new Error(`"${cmd.language}" is not a recognised language. ` +
`Known languages in this project are ${config.languages.join(', ')}.`);
throw new Error(
`"${cmd.language}" is not a recognised language. ` +
`Known languages in this project are ${config.languages.join(
", "
)}.`
);
}
} else {
language = determineAutobuildLanguage(config, logger);
@@ -258,7 +295,7 @@ program
await runAutobuild(language, config, logger);
}
} catch (e) {
logger.error('Autobuild failed');
logger.error("Autobuild failed");
logger.error(e);
process.exitCode = 1;
}
@@ -281,31 +318,57 @@ interface AnalyzeArgs {
}
program
.command('analyze')
.description('Finishes extracting code and runs CodeQL queries')
.requiredOption('--repository <repository>', 'Repository name. (Required)')
.requiredOption('--commit <commit>', 'SHA of commit that was analyzed. (Required)')
.requiredOption('--ref <ref>', 'Name of ref that was analyzed. (Required)')
.requiredOption('--github-url <url>', 'URL of GitHub instance. (Required)')
.requiredOption('--github-auth <auth>', 'GitHub Apps token or personal access token. (Required)')
.option('--checkout-path <path>', 'Checkout path. Default is the current working directory.')
.option('--no-upload', 'Do not upload results after analysis.')
.option('--output-dir <dir>', 'Directory to output SARIF files to. Default is in the temp directory.')
.option('--ram <ram>', 'Amount of memory to use when running queries. Default is to use all available memory.')
.option('--no-add-snippets', 'Specify whether to include code snippets in the sarif output.')
.option('--threads <threads>', 'Number of threads to use when running queries. ' +
'Default is to use all available cores.')
.option('--temp-dir <dir>', 'Directory to use for temporary files. Default is "./codeql-runner".')
.option('--debug', 'Print more verbose output', false)
.command("analyze")
.description("Finishes extracting code and runs CodeQL queries")
.requiredOption("--repository <repository>", "Repository name. (Required)")
.requiredOption(
"--commit <commit>",
"SHA of commit that was analyzed. (Required)"
)
.requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.requiredOption(
"--github-auth <auth>",
"GitHub Apps token or personal access token. (Required)"
)
.option(
"--checkout-path <path>",
"Checkout path. Default is the current working directory."
)
.option("--no-upload", "Do not upload results after analysis.")
.option(
"--output-dir <dir>",
"Directory to output SARIF files to. Default is in the temp directory."
)
.option(
"--ram <ram>",
"Amount of memory to use when running queries. Default is to use all available memory."
)
.option(
"--no-add-snippets",
"Specify whether to include code snippets in the sarif output."
)
.option(
"--threads <threads>",
"Number of threads to use when running queries. " +
"Default is to use all available cores."
)
.option(
"--temp-dir <dir>",
'Directory to use for temporary files. Default is "./codeql-runner".'
)
.option("--debug", "Print more verbose output", false)
.action(async (cmd: AnalyzeArgs) => {
const logger = getRunnerLogger(cmd.debug);
try {
const tempDir = getTempDir(cmd.tempDir);
const outputDir = cmd.outputDir || path.join(tempDir, 'codeql-sarif');
const outputDir = cmd.outputDir || path.join(tempDir, "codeql-sarif");
const config = await getConfig(getTempDir(cmd.tempDir), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. " +
"Was the 'init' command run with the same '--temp-dir' argument as this command.");
throw new Error(
"Config file could not be found at expected location. " +
"Was the 'init' command run with the same '--temp-dir' argument as this command."
);
}
await runAnalyze(
parseRepositoryNwo(cmd.repository),
@@ -319,15 +382,16 @@ program
cmd.githubAuth,
parseGithubUrl(cmd.githubUrl),
cmd.upload,
'runner',
"runner",
outputDir,
getMemoryFlag(cmd.ram),
getAddSnippetsFlag(cmd.addSnippets),
getThreadsFlag(cmd.threads, logger),
config,
logger);
logger
);
} catch (e) {
logger.error('Analyze failed');
logger.error("Analyze failed");
logger.error(e);
process.exitCode = 1;
}
@@ -345,16 +409,30 @@ interface UploadArgs {
}
program
.command('upload')
.description('Uploads a SARIF file, or all SARIF files from a directory, to code scanning')
.requiredOption('--sarif-file <file>', 'SARIF file to upload, or a directory containing multiple SARIF files. (Required)')
.requiredOption('--repository <repository>', 'Repository name. (Required)')
.requiredOption('--commit <commit>', 'SHA of commit that was analyzed. (Required)')
.requiredOption('--ref <ref>', 'Name of ref that was analyzed. (Required)')
.requiredOption('--github-url <url>', 'URL of GitHub instance. (Required)')
.requiredOption('--github-auth <auth>', 'GitHub Apps token or personal access token. (Required)')
.option('--checkout-path <path>', 'Checkout path. Default is the current working directory.')
.option('--debug', 'Print more verbose output', false)
.command("upload")
.description(
"Uploads a SARIF file, or all SARIF files from a directory, to code scanning"
)
.requiredOption(
"--sarif-file <file>",
"SARIF file to upload, or a directory containing multiple SARIF files. (Required)"
)
.requiredOption("--repository <repository>", "Repository name. (Required)")
.requiredOption(
"--commit <commit>",
"SHA of commit that was analyzed. (Required)"
)
.requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.requiredOption(
"--github-auth <auth>",
"GitHub Apps token or personal access token. (Required)"
)
.option(
"--checkout-path <path>",
"Checkout path. Default is the current working directory."
)
.option("--debug", "Print more verbose output", false)
.action(async (cmd: UploadArgs) => {
const logger = getRunnerLogger(cmd.debug);
try {
@@ -370,10 +448,11 @@ program
undefined,
cmd.githubAuth,
parseGithubUrl(cmd.githubUrl),
'runner',
logger);
"runner",
logger
);
} catch (e) {
logger.error('Upload failed');
logger.error("Upload failed");
logger.error(e);
process.exitCode = 1;
}


@@ -1,7 +1,7 @@
export const ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
export const ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
// The time at which the first action (normally init) started executing.
// If a workflow invokes a different action without first invoking the init
// action (i.e. the upload action is being used by a third-party integrator)
// then this variable will be assigned the start time of the action invoked
// rather than the init action.
export const CODEQL_WORKFLOW_STARTED_AT = 'CODEQL_WORKFLOW_STARTED_AT';
export const CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";


@@ -1,31 +1,40 @@
import {TestInterface} from 'ava';
import sinon from 'sinon';
import { TestInterface } from "ava";
import sinon from "sinon";
import * as CodeQL from './codeql';
import * as CodeQL from "./codeql";
type TestContext = {stdoutWrite: any, stderrWrite: any, testOutput: string, env: NodeJS.ProcessEnv};
type TestContext = {
stdoutWrite: any;
stderrWrite: any;
testOutput: string;
env: NodeJS.ProcessEnv;
};
function wrapOutput(context: TestContext) {
// Function signature taken from Socket.write.
// Note there are two overloads:
// write(buffer: Uint8Array | string, cb?: (err?: Error) => void): boolean;
// write(str: Uint8Array | string, encoding?: string, cb?: (err?: Error) => void): boolean;
return (chunk: Uint8Array | string, encoding?: string, cb?: (err?: Error) => void): boolean => {
return (
chunk: Uint8Array | string,
encoding?: string,
cb?: (err?: Error) => void
): boolean => {
// Work out which method overload we are in
if (cb === undefined && typeof encoding === 'function') {
if (cb === undefined && typeof encoding === "function") {
cb = encoding;
encoding = undefined;
}
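// For example (illustrative): process.stdout.write("hi", cb) reaches this
// point with the callback passed in the `encoding` slot, whereas
// process.stdout.write("hi", "utf-8", cb) keeps each argument in its named
// parameter.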
// Record the output
if (typeof chunk === 'string') {
if (typeof chunk === "string") {
context.testOutput += chunk;
} else {
context.testOutput += new TextDecoder(encoding || 'utf-8').decode(chunk);
context.testOutput += new TextDecoder(encoding || "utf-8").decode(chunk);
}
// Satisfy contract by calling callback when done
if (cb !== undefined && typeof cb === 'function') {
if (cb !== undefined && typeof cb === "function") {
cb();
}
@@ -36,7 +45,7 @@ function wrapOutput(context: TestContext) {
export function setupTests(test: TestInterface<any>) {
const typedTest = test as TestInterface<TestContext>;
typedTest.beforeEach(t => {
typedTest.beforeEach((t) => {
// Set an empty CodeQL object so that all method calls will fail
// unless the test explicitly sets one up.
CodeQL.setCodeQL({});
@@ -57,7 +66,7 @@ export function setupTests(test: TestInterface<any>) {
Object.assign(t.context.env, process.env);
});
typedTest.afterEach.always(t => {
typedTest.afterEach.always((t) => {
// Restore stdout and stderr
// The captured output is only replayed if the test failed
process.stdout.write = t.context.stdoutWrite;


@@ -1,13 +1,17 @@
import test from 'ava';
import * as fs from 'fs';
import * as path from 'path';
import test from "ava";
import * as fs from "fs";
import * as path from "path";
import { setCodeQL } from './codeql';
import * as configUtils from './config-utils';
import { Language } from './languages';
import { setupTests } from './testing-utils';
import { concatTracerConfigs, getCombinedTracerConfig, getTracerConfigForLanguage } from './tracer-config';
import * as util from './util';
import { setCodeQL } from "./codeql";
import * as configUtils from "./config-utils";
import { Language } from "./languages";
import { setupTests } from "./testing-utils";
import {
concatTracerConfigs,
getCombinedTracerConfig,
getTracerConfigForLanguage,
} from "./tracer-config";
import * as util from "./util";
setupTests(test);
@@ -20,236 +24,260 @@ function getTestConfig(tmpDir: string): configUtils.Config {
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: '',
codeQLCmd: "",
};
}
// A very minimal setup
test('getTracerConfigForLanguage - minimal setup', async t => {
await util.withTmpDir(async tmpDir => {
test("getTracerConfigForLanguage - minimal setup", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const codeQL = setCodeQL({
getTracerEnv: async function() {
async getTracerEnv() {
return {
'ODASA_TRACER_CONFIGURATION': 'abc',
'foo': 'bar'
ODASA_TRACER_CONFIGURATION: "abc",
foo: "bar",
};
},
});
const result = await getTracerConfigForLanguage(codeQL, config, Language.javascript);
t.deepEqual(result, { spec: 'abc', env: {'foo': 'bar'} });
const result = await getTracerConfigForLanguage(
codeQL,
config,
Language.javascript
);
t.deepEqual(result, { spec: "abc", env: { foo: "bar" } });
});
});
// Existing vars should not be overwritten, unless they are critical or prefixed with CODEQL_
test('getTracerConfigForLanguage - existing / critical vars', async t => {
await util.withTmpDir(async tmpDir => {
test("getTracerConfigForLanguage - existing / critical vars", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
// Set up some variables in the environment
process.env['foo'] = 'abc';
process.env['SEMMLE_PRELOAD_libtrace'] = 'abc';
process.env['SEMMLE_RUNNER'] = 'abc';
process.env['SEMMLE_COPY_EXECUTABLES_ROOT'] = 'abc';
process.env['SEMMLE_DEPTRACE_SOCKET'] = 'abc';
process.env['SEMMLE_JAVA_TOOL_OPTIONS'] = 'abc';
process.env['SEMMLE_DEPTRACE_SOCKET'] = 'abc';
process.env['CODEQL_VAR'] = 'abc';
process.env["foo"] = "abc";
process.env["SEMMLE_PRELOAD_libtrace"] = "abc";
process.env["SEMMLE_RUNNER"] = "abc";
process.env["SEMMLE_COPY_EXECUTABLES_ROOT"] = "abc";
process.env["SEMMLE_DEPTRACE_SOCKET"] = "abc";
process.env["SEMMLE_JAVA_TOOL_OPTIONS"] = "abc";
process.env["SEMMLE_DEPTRACE_SOCKET"] = "abc";
process.env["CODEQL_VAR"] = "abc";
// Now CodeQL returns all these variables, and one more, with different values
const codeQL = setCodeQL({
getTracerEnv: async function() {
async getTracerEnv() {
return {
'ODASA_TRACER_CONFIGURATION': 'abc',
'foo': 'bar',
'baz': 'qux',
'SEMMLE_PRELOAD_libtrace': 'SEMMLE_PRELOAD_libtrace',
'SEMMLE_RUNNER': 'SEMMLE_RUNNER',
'SEMMLE_COPY_EXECUTABLES_ROOT': 'SEMMLE_COPY_EXECUTABLES_ROOT',
'SEMMLE_DEPTRACE_SOCKET': 'SEMMLE_DEPTRACE_SOCKET',
'SEMMLE_JAVA_TOOL_OPTIONS': 'SEMMLE_JAVA_TOOL_OPTIONS',
'CODEQL_VAR': 'CODEQL_VAR',
ODASA_TRACER_CONFIGURATION: "abc",
foo: "bar",
baz: "qux",
SEMMLE_PRELOAD_libtrace: "SEMMLE_PRELOAD_libtrace",
SEMMLE_RUNNER: "SEMMLE_RUNNER",
SEMMLE_COPY_EXECUTABLES_ROOT: "SEMMLE_COPY_EXECUTABLES_ROOT",
SEMMLE_DEPTRACE_SOCKET: "SEMMLE_DEPTRACE_SOCKET",
SEMMLE_JAVA_TOOL_OPTIONS: "SEMMLE_JAVA_TOOL_OPTIONS",
CODEQL_VAR: "CODEQL_VAR",
};
},
});
const result = await getTracerConfigForLanguage(codeQL, config, Language.javascript);
const result = await getTracerConfigForLanguage(
codeQL,
config,
Language.javascript
);
t.deepEqual(result, {
spec: 'abc',
spec: "abc",
env: {
// Should contain all variables except 'foo', because that already existed in the
// environment with a different value, and is not deemed a "critical" variable.
'baz': 'qux',
'SEMMLE_PRELOAD_libtrace': 'SEMMLE_PRELOAD_libtrace',
'SEMMLE_RUNNER': 'SEMMLE_RUNNER',
'SEMMLE_COPY_EXECUTABLES_ROOT': 'SEMMLE_COPY_EXECUTABLES_ROOT',
'SEMMLE_DEPTRACE_SOCKET': 'SEMMLE_DEPTRACE_SOCKET',
'SEMMLE_JAVA_TOOL_OPTIONS': 'SEMMLE_JAVA_TOOL_OPTIONS',
'CODEQL_VAR': 'CODEQL_VAR',
}
baz: "qux",
SEMMLE_PRELOAD_libtrace: "SEMMLE_PRELOAD_libtrace",
SEMMLE_RUNNER: "SEMMLE_RUNNER",
SEMMLE_COPY_EXECUTABLES_ROOT: "SEMMLE_COPY_EXECUTABLES_ROOT",
SEMMLE_DEPTRACE_SOCKET: "SEMMLE_DEPTRACE_SOCKET",
SEMMLE_JAVA_TOOL_OPTIONS: "SEMMLE_JAVA_TOOL_OPTIONS",
CODEQL_VAR: "CODEQL_VAR",
},
});
});
});
test('concatTracerConfigs - minimal configs correctly combined', async t => {
await util.withTmpDir(async tmpDir => {
test("concatTracerConfigs - minimal configs correctly combined", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec1 = path.join(tmpDir, 'spec1');
fs.writeFileSync(spec1, 'foo.log\n2\nabc\ndef');
const spec1 = path.join(tmpDir, "spec1");
fs.writeFileSync(spec1, "foo.log\n2\nabc\ndef");
const tc1 = {
spec: spec1,
env: {
'a': 'a',
'b': 'b',
}
a: "a",
b: "b",
},
};
const spec2 = path.join(tmpDir, 'spec2');
fs.writeFileSync(spec2, 'foo.log\n1\nghi');
const spec2 = path.join(tmpDir, "spec2");
fs.writeFileSync(spec2, "foo.log\n1\nghi");
const tc2 = {
spec: spec2,
env: {
'c': 'c',
}
c: "c",
},
};
const result = concatTracerConfigs({ 'javascript': tc1, 'python': tc2 }, config);
const result = concatTracerConfigs(
{ javascript: tc1, python: tc2 },
config
);
t.deepEqual(result, {
spec: path.join(tmpDir, 'compound-spec'),
spec: path.join(tmpDir, "compound-spec"),
env: {
'a': 'a',
'b': 'b',
'c': 'c',
}
a: "a",
b: "b",
c: "c",
},
});
t.true(fs.existsSync(result.spec));
t.deepEqual(
fs.readFileSync(result.spec, 'utf8'),
path.join(tmpDir, 'compound-build-tracer.log') + '\n3\nabc\ndef\nghi');
fs.readFileSync(result.spec, "utf8"),
`${path.join(tmpDir, "compound-build-tracer.log")}\n3\nabc\ndef\nghi`
);
});
});
test('concatTracerConfigs - conflicting env vars', async t => {
await util.withTmpDir(async tmpDir => {
test("concatTracerConfigs - conflicting env vars", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec = path.join(tmpDir, 'spec');
fs.writeFileSync(spec, 'foo.log\n0');
const spec = path.join(tmpDir, "spec");
fs.writeFileSync(spec, "foo.log\n0");
// Ok if env vars have the same name and the same value
t.deepEqual(
concatTracerConfigs(
{
'javascript': {spec: spec, env: {'a': 'a', 'b': 'b'}},
'python': {spec: spec, env: {'b': 'b', 'c': 'c'}},
javascript: { spec, env: { a: "a", b: "b" } },
python: { spec, env: { b: "b", c: "c" } },
},
config).env,
config
).env,
{
'a': 'a',
'b': 'b',
'c': 'c',
});
a: "a",
b: "b",
c: "c",
}
);
// Throws if env vars have same name but different values
const e = t.throws(() =>
concatTracerConfigs(
{
'javascript': {spec: spec, env: {'a': 'a', 'b': 'b'}},
'python': {spec: spec, env: {'b': 'c'}},
javascript: { spec, env: { a: "a", b: "b" } },
python: { spec, env: { b: "c" } },
},
config));
t.deepEqual(e.message, 'Incompatible values in environment parameter b: b and c');
config
)
);
t.deepEqual(
e.message,
"Incompatible values in environment parameter b: b and c"
);
});
});
test('concatTracerConfigs - cpp spec lines come last if present', async t => {
await util.withTmpDir(async tmpDir => {
test("concatTracerConfigs - cpp spec lines come last if present", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec1 = path.join(tmpDir, 'spec1');
fs.writeFileSync(spec1, 'foo.log\n2\nabc\ndef');
const spec1 = path.join(tmpDir, "spec1");
fs.writeFileSync(spec1, "foo.log\n2\nabc\ndef");
const tc1 = {
spec: spec1,
env: {
'a': 'a',
'b': 'b',
}
a: "a",
b: "b",
},
};
const spec2 = path.join(tmpDir, 'spec2');
fs.writeFileSync(spec2, 'foo.log\n1\nghi');
const spec2 = path.join(tmpDir, "spec2");
fs.writeFileSync(spec2, "foo.log\n1\nghi");
const tc2 = {
spec: spec2,
env: {
'c': 'c',
}
c: "c",
},
};
const result = concatTracerConfigs({ 'cpp': tc1, 'python': tc2 }, config);
const result = concatTracerConfigs({ cpp: tc1, python: tc2 }, config);
t.deepEqual(result, {
spec: path.join(tmpDir, 'compound-spec'),
spec: path.join(tmpDir, "compound-spec"),
env: {
'a': 'a',
'b': 'b',
'c': 'c',
}
a: "a",
b: "b",
c: "c",
},
});
t.true(fs.existsSync(result.spec));
t.deepEqual(
fs.readFileSync(result.spec, 'utf8'),
path.join(tmpDir, 'compound-build-tracer.log') + '\n3\nghi\nabc\ndef');
fs.readFileSync(result.spec, "utf8"),
`${path.join(tmpDir, "compound-build-tracer.log")}\n3\nghi\nabc\ndef`
);
});
});
test('concatTracerConfigs - SEMMLE_COPY_EXECUTABLES_ROOT is updated to point to compound spec', async t => {
await util.withTmpDir(async tmpDir => {
test("concatTracerConfigs - SEMMLE_COPY_EXECUTABLES_ROOT is updated to point to compound spec", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec = path.join(tmpDir, 'spec');
fs.writeFileSync(spec, 'foo.log\n0');
const spec = path.join(tmpDir, "spec");
fs.writeFileSync(spec, "foo.log\n0");
const result = concatTracerConfigs(
{
'javascript': {spec: spec, env: {'a': 'a', 'b': 'b'}},
'python': {spec: spec, env: {'SEMMLE_COPY_EXECUTABLES_ROOT': 'foo'}},
javascript: { spec, env: { a: "a", b: "b" } },
python: { spec, env: { SEMMLE_COPY_EXECUTABLES_ROOT: "foo" } },
},
config);
config
);
t.deepEqual(result.env, {
'a': 'a',
'b': 'b',
'SEMMLE_COPY_EXECUTABLES_ROOT': path.join(tmpDir, 'compound-temp')
a: "a",
b: "b",
SEMMLE_COPY_EXECUTABLES_ROOT: path.join(tmpDir, "compound-temp"),
});
});
});
test('concatTracerConfigs - compound environment file is created correctly', async t => {
await util.withTmpDir(async tmpDir => {
test("concatTracerConfigs - compound environment file is created correctly", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec1 = path.join(tmpDir, 'spec1');
fs.writeFileSync(spec1, 'foo.log\n2\nabc\ndef');
const spec1 = path.join(tmpDir, "spec1");
fs.writeFileSync(spec1, "foo.log\n2\nabc\ndef");
const tc1 = {
spec: spec1,
env: {
'a': 'a',
}
a: "a",
},
};
const spec2 = path.join(tmpDir, 'spec2');
fs.writeFileSync(spec2, 'foo.log\n1\nghi');
const spec2 = path.join(tmpDir, "spec2");
fs.writeFileSync(spec2, "foo.log\n1\nghi");
const tc2 = {
spec: spec2,
env: {
'foo': 'bar_baz',
}
foo: "bar_baz",
},
};
const result = concatTracerConfigs({ 'javascript': tc1, 'python': tc2 }, config);
const envPath = result.spec + '.environment';
const result = concatTracerConfigs(
{ javascript: tc1, python: tc2 },
config
);
const envPath = `${result.spec}.environment`;
t.true(fs.existsSync(envPath));
const buffer: Buffer = fs.readFileSync(envPath);
@@ -257,23 +285,23 @@ test('concatTracerConfigs - compound environment file is created correctly', asy
t.deepEqual(buffer.length, 28);
t.deepEqual(buffer.readInt32LE(0), 2); // number of env vars
t.deepEqual(buffer.readInt32LE(4), 4); // length of env var definition
t.deepEqual(buffer.toString('utf8', 8, 12), 'a=a\0'); // [key]=[value]\0
t.deepEqual(buffer.readInt32LE(12), 12); // length of env var definition
t.deepEqual(buffer.toString('utf8', 16, 28), 'foo=bar_baz\0'); // [key]=[value]\0
t.deepEqual(buffer.toString("utf8", 8, 12), "a=a\0"); // [key]=[value]\0
t.deepEqual(buffer.readInt32LE(12), 12); // length of env var definition
t.deepEqual(buffer.toString("utf8", 16, 28), "foo=bar_baz\0"); // [key]=[value]\0
});
});
test('getCombinedTracerConfig - return undefined when no languages are traced languages', async t => {
await util.withTmpDir(async tmpDir => {
test("getCombinedTracerConfig - return undefined when no languages are traced languages", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
// No traced languages
config.languages = [Language.javascript, Language.python];
const codeQL = setCodeQL({
getTracerEnv: async function() {
async getTracerEnv() {
return {
'ODASA_TRACER_CONFIGURATION': 'abc',
'foo': 'bar'
ODASA_TRACER_CONFIGURATION: "abc",
foo: "bar",
};
},
});
@@ -282,18 +310,18 @@ test('getCombinedTracerConfig - return undefined when no languages are traced la
});
});
test('getCombinedTracerConfig - valid spec file', async t => {
await util.withTmpDir(async tmpDir => {
test("getCombinedTracerConfig - valid spec file", async (t) => {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfig(tmpDir);
const spec = path.join(tmpDir, 'spec');
fs.writeFileSync(spec, 'foo.log\n2\nabc\ndef');
const spec = path.join(tmpDir, "spec");
fs.writeFileSync(spec, "foo.log\n2\nabc\ndef");
const codeQL = setCodeQL({
getTracerEnv: async function() {
async getTracerEnv() {
return {
'ODASA_TRACER_CONFIGURATION': spec,
'foo': 'bar',
ODASA_TRACER_CONFIGURATION: spec,
foo: "bar",
};
},
});
@@ -301,17 +329,27 @@ test('getCombinedTracerConfig - valid spec file', async t => {
const result = await getCombinedTracerConfig(config, codeQL);
const expectedEnv = {
'foo': 'bar',
'ODASA_TRACER_CONFIGURATION': result!.spec,
foo: "bar",
ODASA_TRACER_CONFIGURATION: result!.spec,
};
if (process.platform === 'darwin') {
expectedEnv['DYLD_INSERT_LIBRARIES'] = path.join(path.dirname(codeQL.getPath()), 'tools', 'osx64', 'libtrace.dylib');
} else if (process.platform !== 'win32') {
expectedEnv['LD_PRELOAD'] = path.join(path.dirname(codeQL.getPath()), 'tools', 'linux64', '${LIB}trace.so');
if (process.platform === "darwin") {
expectedEnv["DYLD_INSERT_LIBRARIES"] = path.join(
path.dirname(codeQL.getPath()),
"tools",
"osx64",
"libtrace.dylib"
);
} else if (process.platform !== "win32") {
expectedEnv["LD_PRELOAD"] = path.join(
path.dirname(codeQL.getPath()),
"tools",
"linux64",
"${LIB}trace.so"
);
}
t.deepEqual(result, {
spec: path.join(tmpDir, 'compound-spec'),
spec: path.join(tmpDir, "compound-spec"),
env: expectedEnv,
});
});


@@ -1,49 +1,59 @@
import * as fs from 'fs';
import * as path from 'path';
import * as fs from "fs";
import * as path from "path";
import { CodeQL } from './codeql';
import * as configUtils from './config-utils';
import { isTracedLanguage, Language } from './languages';
import * as util from './util';
import { CodeQL } from "./codeql";
import * as configUtils from "./config-utils";
import { Language, isTracedLanguage } from "./languages";
import * as util from "./util";
export type TracerConfig = {
spec: string;
env: { [key: string]: string };
};
const CRITICAL_TRACER_VARS = new Set(
['SEMMLE_PRELOAD_libtrace',
, 'SEMMLE_RUNNER',
, 'SEMMLE_COPY_EXECUTABLES_ROOT',
, 'SEMMLE_DEPTRACE_SOCKET',
, 'SEMMLE_JAVA_TOOL_OPTIONS'
]);
const CRITICAL_TRACER_VARS = new Set([
"SEMMLE_PRELOAD_libtrace",
,
"SEMMLE_RUNNER",
,
"SEMMLE_COPY_EXECUTABLES_ROOT",
,
"SEMMLE_DEPTRACE_SOCKET",
,
"SEMMLE_JAVA_TOOL_OPTIONS",
]);
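// These variables are forwarded to the build environment even when a variable
// of the same name is already set (see getTracerConfigForLanguage below); the
// "getTracerConfigForLanguage - existing / critical vars" test above exercises
// this behaviour.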
export async function getTracerConfigForLanguage(
codeql: CodeQL,
config: configUtils.Config,
language: Language): Promise<TracerConfig> {
language: Language
): Promise<TracerConfig> {
const env = await codeql.getTracerEnv(
util.getCodeQLDatabasePath(config.tempDir, language)
);
const env = await codeql.getTracerEnv(util.getCodeQLDatabasePath(config.tempDir, language));
const spec = env['ODASA_TRACER_CONFIGURATION'];
const spec = env["ODASA_TRACER_CONFIGURATION"];
const info: TracerConfig = { spec, env: {} };
// Extract critical tracer variables from the environment
for (let entry of Object.entries(env)) {
for (const entry of Object.entries(env)) {
const key = entry[0];
const value = entry[1];
// skip ODASA_TRACER_CONFIGURATION as it is handled separately
if (key === 'ODASA_TRACER_CONFIGURATION') {
if (key === "ODASA_TRACER_CONFIGURATION") {
continue;
}
// skip undefined values
if (typeof value === 'undefined') {
if (typeof value === "undefined") {
continue;
}
// Keep variables that do not exist in the current environment. In addition,
// always keep critical and CODEQL_ variables.
if (typeof process.env[key] === 'undefined' || CRITICAL_TRACER_VARS.has(key) || key.startsWith('CODEQL_')) {
if (
typeof process.env[key] === "undefined" ||
CRITICAL_TRACER_VARS.has(key) ||
key.startsWith("CODEQL_")
) {
info.env[key] = value;
}
}
@@ -52,26 +62,27 @@ export async function getTracerConfigForLanguage(
export function concatTracerConfigs(
tracerConfigs: { [lang: string]: TracerConfig },
config: configUtils.Config): TracerConfig {
config: configUtils.Config
): TracerConfig {
// A tracer config is a map containing additional environment variables and a tracer 'spec' file.
// A tracer 'spec' file has the following format [log_file, number_of_blocks, blocks_text]
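// Illustrative spec contents, borrowing the values used by the tests: a file
// containing "foo.log\n2\nabc\ndef" names "foo.log" as the log file, declares
// two blocks, and then lists the block text "abc" and "def".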
// Merge the environments
const env: { [key: string]: string; } = {};
const env: { [key: string]: string } = {};
let copyExecutables = false;
let envSize = 0;
for (const v of Object.values(tracerConfigs)) {
for (let e of Object.entries(v.env)) {
for (const e of Object.entries(v.env)) {
const name = e[0];
const value = e[1];
// skip SEMMLE_COPY_EXECUTABLES_ROOT as it is handled separately
if (name === 'SEMMLE_COPY_EXECUTABLES_ROOT') {
if (name === "SEMMLE_COPY_EXECUTABLES_ROOT") {
copyExecutables = true;
} else if (name in env) {
if (env[name] !== value) {
throw Error('Incompatible values in environment parameter ' +
name + ': ' + env[name] + ' and ' + value);
throw Error(
`Incompatible values in environment parameter ${name}: ${env[name]} and ${value}`
);
}
} else {
env[name] = value;
@@ -81,49 +92,58 @@ export function concatTracerConfigs(
}
// Concatenate spec files into a new spec file
let languages = Object.keys(tracerConfigs);
const cppIndex = languages.indexOf('cpp');
const languages = Object.keys(tracerConfigs);
const cppIndex = languages.indexOf("cpp");
// Make sure cpp is the last language, if it's present, since it must be concatenated last
if (cppIndex !== -1) {
let lastLang = languages[languages.length - 1];
const lastLang = languages[languages.length - 1];
languages[languages.length - 1] = languages[cppIndex];
languages[cppIndex] = lastLang;
}
let totalLines: string[] = [];
const totalLines: string[] = [];
let totalCount = 0;
for (let lang of languages) {
const lines = fs.readFileSync(tracerConfigs[lang].spec, 'utf8').split(/\r?\n/);
for (const lang of languages) {
const lines = fs
.readFileSync(tracerConfigs[lang].spec, "utf8")
.split(/\r?\n/);
const count = parseInt(lines[1], 10);
totalCount += count;
totalLines.push(...lines.slice(2));
}
const newLogFilePath = path.resolve(config.tempDir, 'compound-build-tracer.log');
const spec = path.resolve(config.tempDir, 'compound-spec');
const compoundTempFolder = path.resolve(config.tempDir, 'compound-temp');
const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];
const newLogFilePath = path.resolve(
config.tempDir,
"compound-build-tracer.log"
);
const spec = path.resolve(config.tempDir, "compound-spec");
const compoundTempFolder = path.resolve(config.tempDir, "compound-temp");
const newSpecContent = [
newLogFilePath,
totalCount.toString(10),
...totalLines,
];
if (copyExecutables) {
env['SEMMLE_COPY_EXECUTABLES_ROOT'] = compoundTempFolder;
env["SEMMLE_COPY_EXECUTABLES_ROOT"] = compoundTempFolder;
envSize += 1;
}
fs.writeFileSync(spec, newSpecContent.join('\n'));
fs.writeFileSync(spec, newSpecContent.join("\n"));
// Prepare the content of the compound environment file
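// Layout, as exercised by the tests (not newly specified here): a little-endian
// int32 holding the number of variables, then for each variable an int32 with
// the byte length of "key=value\0" followed by that string in UTF-8. For
// { a: "a", foo: "bar_baz" } this gives 4 + (4 + 4) + (4 + 12) = 28 bytes.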
let buffer = Buffer.alloc(4);
buffer.writeInt32LE(envSize, 0);
for (let e of Object.entries(env)) {
for (const e of Object.entries(env)) {
const key = e[0];
const value = e[1];
const lineBuffer = new Buffer(key + '=' + value + '\0', 'utf8');
const lineBuffer = new Buffer(`${key}=${value}\0`, "utf8");
const sizeBuffer = Buffer.alloc(4);
sizeBuffer.writeInt32LE(lineBuffer.length, 0);
buffer = Buffer.concat([buffer, sizeBuffer, lineBuffer]);
}
// Write the compound environment
const envPath = spec + '.environment';
const envPath = `${spec}.environment`;
fs.writeFileSync(envPath, buffer);
return { env, spec };
@@ -131,8 +151,8 @@ export function concatTracerConfigs(
export async function getCombinedTracerConfig(
config: configUtils.Config,
codeql: CodeQL): Promise<TracerConfig | undefined> {
codeql: CodeQL
): Promise<TracerConfig | undefined> {
// Abort if there are no traced languages as there's nothing to do
const tracedLanguages = config.languages.filter(isTracedLanguage);
if (tracedLanguages.length === 0) {
@@ -142,17 +162,31 @@ export async function getCombinedTracerConfig(
// Get all the tracer configs and combine them together
const tracedLanguageConfigs: { [lang: string]: TracerConfig } = {};
for (const language of tracedLanguages) {
tracedLanguageConfigs[language] = await getTracerConfigForLanguage(codeql, config, language);
tracedLanguageConfigs[language] = await getTracerConfigForLanguage(
codeql,
config,
language
);
}
const mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
// Add a couple more variables
mainTracerConfig.env['ODASA_TRACER_CONFIGURATION'] = mainTracerConfig.spec;
mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
const codeQLDir = path.dirname(codeql.getPath());
if (process.platform === 'darwin') {
mainTracerConfig.env['DYLD_INSERT_LIBRARIES'] = path.join(codeQLDir, 'tools', 'osx64', 'libtrace.dylib');
} else if (process.platform !== 'win32') {
mainTracerConfig.env['LD_PRELOAD'] = path.join(codeQLDir, 'tools', 'linux64', '${LIB}trace.so');
if (process.platform === "darwin") {
mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(
codeQLDir,
"tools",
"osx64",
"libtrace.dylib"
);
} else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(
codeQLDir,
"tools",
"linux64",
"${LIB}trace.so"
);
}
return mainTracerConfig;


@@ -1,17 +1,21 @@
import test from 'ava';
import test from "ava";
import { getRunnerLogger } from './logging';
import {setupTests} from './testing-utils';
import * as uploadLib from './upload-lib';
import { getRunnerLogger } from "./logging";
import { setupTests } from "./testing-utils";
import * as uploadLib from "./upload-lib";
setupTests(test);
test('validateSarifFileSchema - valid', t => {
const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
t.notThrows(() => uploadLib.validateSarifFileSchema(inputFile, getRunnerLogger(true)));
test("validateSarifFileSchema - valid", (t) => {
const inputFile = `${__dirname}/../src/testdata/valid-sarif.sarif`;
t.notThrows(() =>
uploadLib.validateSarifFileSchema(inputFile, getRunnerLogger(true))
);
});
test('validateSarifFileSchema - invalid', t => {
const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
t.throws(() => uploadLib.validateSarifFileSchema(inputFile, getRunnerLogger(true)));
test("validateSarifFileSchema - invalid", (t) => {
const inputFile = `${__dirname}/../src/testdata/invalid-sarif.sarif`;
t.throws(() =>
uploadLib.validateSarifFileSchema(inputFile, getRunnerLogger(true))
);
});


@@ -1,32 +1,32 @@
import * as core from '@actions/core';
import fileUrl from 'file-url';
import * as fs from 'fs';
import * as jsonschema from 'jsonschema';
import * as path from 'path';
import zlib from 'zlib';
import * as core from "@actions/core";
import fileUrl from "file-url";
import * as fs from "fs";
import * as jsonschema from "jsonschema";
import * as path from "path";
import zlib from "zlib";
import * as api from './api-client';
import * as fingerprints from './fingerprints';
import { Logger } from './logging';
import { RepositoryNwo } from './repository';
import * as sharedEnv from './shared-environment';
import * as util from './util';
import * as api from "./api-client";
import * as fingerprints from "./fingerprints";
import { Logger } from "./logging";
import { RepositoryNwo } from "./repository";
import * as sharedEnv from "./shared-environment";
import * as util from "./util";
// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
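// For example (illustrative): combining two files that each hold a single run
// yields { version: "2.1.0", runs: [runA, runB] }; files with differing SARIF
// versions are rejected below.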
export function combineSarifFiles(sarifFiles: string[]): string {
let combinedSarif = {
const combinedSarif = {
version: null,
runs: [] as any[]
runs: [] as any[],
};
for (let sarifFile of sarifFiles) {
let sarifObject = JSON.parse(fs.readFileSync(sarifFile, 'utf8'));
for (const sarifFile of sarifFiles) {
const sarifObject = JSON.parse(fs.readFileSync(sarifFile, "utf8"));
// Check SARIF version
if (combinedSarif.version === null) {
combinedSarif.version = sarifObject.version;
} else if (combinedSarif.version !== sarifObject.version) {
throw "Different SARIF versions encountered: " + combinedSarif.version + " and " + sarifObject.version;
throw `Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`;
}
combinedSarif.runs.push(...sarifObject.runs);
@@ -43,12 +43,12 @@ async function uploadPayload(
githubAuth: string,
githubUrl: string,
mode: util.Mode,
logger: Logger) {
logger.info('Uploading results');
logger: Logger
) {
logger.info("Uploading results");
// If in test mode we don't want to upload the results
const testMode = process.env['TEST_MODE'] === 'true' || false;
const testMode = process.env["TEST_MODE"] === "true" || false;
if (testMode) {
return;
}
@@ -62,16 +62,17 @@ async function uploadPayload(
const client = api.getApiClient(githubAuth, githubUrl);
for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
const reqURL = mode === 'actions'
? 'PUT /repos/:owner/:repo/code-scanning/analysis'
: 'POST /repos/:owner/:repo/code-scanning/sarifs';
const response = await client.request(reqURL, ({
const reqURL =
mode === "actions"
? "PUT /repos/:owner/:repo/code-scanning/analysis"
: "POST /repos/:owner/:repo/code-scanning/sarifs";
const response = await client.request(reqURL, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
data: payload,
}));
});
logger.debug('response status: ' + response.status);
logger.debug(`response status: ${response.status}`);
const statusCode = response.status;
if (statusCode === 202) {
@@ -83,30 +84,41 @@ async function uploadPayload(
// On any other status code that's not 5xx, mark the upload as failed
if (!statusCode || statusCode < 500 || statusCode >= 600) {
throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
throw new Error(
`Upload failed (${requestID}): (${statusCode}) ${JSON.stringify(
response.data
)}`
);
}
// On a 5xx status code we may retry the request
if (attempt < backoffPeriods.length) {
// Log the failure as a warning but don't mark the action as failed yet
logger.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
') failed (' + requestID + '). Retrying in ' + backoffPeriods[attempt] +
' seconds: (' + statusCode + ') ' + JSON.stringify(response.data));
logger.warning(
`Upload attempt (${attempt + 1} of ${
backoffPeriods.length + 1
}) failed (${requestID}). Retrying in ${
backoffPeriods[attempt]
} seconds: (${statusCode}) ${JSON.stringify(response.data)}`
);
// Sleep for the backoff period
await new Promise(r => setTimeout(r, backoffPeriods[attempt] * 1000));
await new Promise((r) => setTimeout(r, backoffPeriods[attempt] * 1000));
continue;
} else {
// If the upload fails with 5xx then we assume it is a temporary problem
// and not an error that the user has caused or can fix.
// We avoid marking the job as failed to avoid breaking CI workflows.
throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
throw new Error(
`Upload failed (${requestID}): (${statusCode}) ${JSON.stringify(
response.data
)}`
);
}
}
// This case shouldn't ever happen as the final iteration of the loop
// will always throw an error instead of falling through to here.
throw new Error('Upload failed');
throw new Error("Upload failed");
}
export interface UploadStatusReport {
@@ -134,19 +146,19 @@ export async function upload(
githubAuth: string,
githubUrl: string,
mode: util.Mode,
logger: Logger): Promise<UploadStatusReport> {
logger: Logger
): Promise<UploadStatusReport> {
const sarifFiles: string[] = [];
if (!fs.existsSync(sarifPath)) {
throw new Error(`Path does not exist: ${sarifPath}`);
}
if (fs.lstatSync(sarifPath).isDirectory()) {
fs.readdirSync(sarifPath)
.filter(f => f.endsWith(".sarif"))
.map(f => path.resolve(sarifPath, f))
.forEach(f => sarifFiles.push(f));
.filter((f) => f.endsWith(".sarif"))
.map((f) => path.resolve(sarifPath, f))
.forEach((f) => sarifFiles.push(f));
if (sarifFiles.length === 0) {
throw new Error("No SARIF files found to upload in \"" + sarifPath + "\".");
throw new Error(`No SARIF files found to upload in "${sarifPath}".`);
}
} else {
sarifFiles.push(sarifPath);
@@ -165,7 +177,8 @@ export async function upload(
githubAuth,
githubUrl,
mode,
logger);
logger
);
}
// Counts the number of results in the given SARIF file
@@ -180,22 +193,26 @@ export function countResultsInSarif(sarif: string): number {
// Validates that the given file path refers to a valid SARIF file.
// Throws an error if the file is invalid.
export function validateSarifFileSchema(sarifFilePath: string, logger: Logger) {
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
const schema = require('../src/sarif_v2.1.0_schema.json');
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, "utf8"));
const schema = require("../src/sarif_v2.1.0_schema.json");
const result = new jsonschema.Validator().validate(sarif, schema);
if (!result.valid) {
// Output the more verbose error messages in groups as these may be very large.
for (const error of result.errors) {
logger.startGroup("Error details: " + error.stack);
logger.startGroup(`Error details: ${error.stack}`);
logger.info(JSON.stringify(error, null, 2));
logger.endGroup();
}
// Set the main error message to the stacks of all the errors.
// This should be of a manageable size and may even give enough information to fix the error.
const sarifErrors = result.errors.map(e => "- " + e.stack);
throw new Error("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + sarifErrors.join("\n"));
const sarifErrors = result.errors.map((e) => `- ${e.stack}`);
throw new Error(
`Unable to upload "${sarifFilePath}" as it is not valid SARIF:\n${sarifErrors.join(
"\n"
)}`
);
}
}
@@ -214,15 +231,17 @@ async function uploadFiles(
githubAuth: string,
githubUrl: string,
mode: util.Mode,
logger: Logger): Promise<UploadStatusReport> {
logger: Logger
): Promise<UploadStatusReport> {
logger.info(`Uploading sarif files: ${JSON.stringify(sarifFiles)}`);
logger.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
if (mode === 'actions') {
if (mode === "actions") {
// This check only works on actions as env vars don't persist between calls to the runner
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
throw new Error(
"Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job"
);
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
}
@@ -233,47 +252,58 @@ async function uploadFiles(
}
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = fingerprints.addFingerprints(sarifPayload, checkoutPath, logger);
sarifPayload = fingerprints.addFingerprints(
sarifPayload,
checkoutPath,
logger
);
const zipped_sarif = zlib.gzipSync(sarifPayload).toString('base64');
let checkoutURI = fileUrl(checkoutPath);
const zipped_sarif = zlib.gzipSync(sarifPayload).toString("base64");
const checkoutURI = fileUrl(checkoutPath);
const toolNames = util.getToolNames(sarifPayload);
let payload: string;
if (mode === 'actions') {
if (mode === "actions") {
payload = JSON.stringify({
"commit_oid": commitOid,
"ref": ref,
"analysis_key": analysisKey,
"analysis_name": analysisName,
"sarif": zipped_sarif,
"workflow_run_id": workflowRunID,
"checkout_uri": checkoutURI,
"environment": environment,
"started_at": process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT],
"tool_names": toolNames,
commit_oid: commitOid,
ref,
analysis_key: analysisKey,
analysis_name: analysisName,
sarif: zipped_sarif,
workflow_run_id: workflowRunID,
checkout_uri: checkoutURI,
environment,
started_at: process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT],
tool_names: toolNames,
});
} else {
payload = JSON.stringify({
"commit_sha": commitOid,
"ref": ref,
"sarif": zipped_sarif,
"checkout_uri": checkoutURI,
"tool_name": toolNames[0],
commit_sha: commitOid,
ref,
sarif: zipped_sarif,
checkout_uri: checkoutURI,
tool_name: toolNames[0],
});
}
// Log some useful debug info about the upload
const rawUploadSizeBytes = sarifPayload.length;
logger.debug("Raw upload size: " + rawUploadSizeBytes + " bytes");
logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
const zippedUploadSizeBytes = zipped_sarif.length;
logger.debug("Base64 zipped upload size: " + zippedUploadSizeBytes + " bytes");
logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
const numResultInSarif = countResultsInSarif(sarifPayload);
logger.debug("Number of results in upload: " + numResultInSarif);
logger.debug(`Number of results in upload: ${numResultInSarif}`);
// Make the upload
await uploadPayload(payload, repositoryNwo, githubAuth, githubUrl, mode, logger);
await uploadPayload(
payload,
repositoryNwo,
githubAuth,
githubUrl,
mode,
logger
);
return {
raw_upload_size_bytes: rawUploadSizeBytes,


@@ -1,58 +1,75 @@
import * as core from '@actions/core';
import * as core from "@actions/core";
import { getActionsLogger } from './logging';
import { parseRepositoryNwo } from './repository';
import * as upload_lib from './upload-lib';
import * as util from './util';
import { getActionsLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import * as upload_lib from "./upload-lib";
import * as util from "./util";
interface UploadSarifStatusReport extends util.StatusReportBase, upload_lib.UploadStatusReport {}
interface UploadSarifStatusReport
extends util.StatusReportBase,
upload_lib.UploadStatusReport {}
async function sendSuccessStatusReport(startedAt: Date, uploadStats: upload_lib.UploadStatusReport) {
const statusReportBase = await util.createStatusReportBase('upload-sarif', 'success', startedAt);
async function sendSuccessStatusReport(
startedAt: Date,
uploadStats: upload_lib.UploadStatusReport
) {
const statusReportBase = await util.createStatusReportBase(
"upload-sarif",
"success",
startedAt
);
const statusReport: UploadSarifStatusReport = {
...statusReportBase,
... uploadStats,
...uploadStats,
};
await util.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
if (!await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'starting', startedAt), true)) {
if (
!(await util.sendStatusReport(
await util.createStatusReportBase("upload-sarif", "starting", startedAt),
true
))
) {
return;
}
try {
const uploadStats = await upload_lib.upload(
core.getInput('sarif_file'),
parseRepositoryNwo(util.getRequiredEnvParam('GITHUB_REPOSITORY')),
core.getInput("sarif_file"),
parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")),
await util.getCommitOid(),
util.getRef(),
await util.getAnalysisKey(),
util.getRequiredEnvParam('GITHUB_WORKFLOW'),
util.getRequiredEnvParam("GITHUB_WORKFLOW"),
util.getWorkflowRunID(),
core.getInput('checkout_path'),
core.getInput('matrix'),
core.getInput('token'),
util.getRequiredEnvParam('GITHUB_SERVER_URL'),
'actions',
getActionsLogger());
core.getInput("checkout_path"),
core.getInput("matrix"),
core.getInput("token"),
util.getRequiredEnvParam("GITHUB_SERVER_URL"),
"actions",
getActionsLogger()
);
await sendSuccessStatusReport(startedAt, uploadStats);
} catch (error) {
core.setFailed(error.message);
console.log(error);
await util.sendStatusReport(await util.createStatusReportBase(
'upload-sarif',
'failure',
startedAt,
error.message,
error.stack));
await util.sendStatusReport(
await util.createStatusReportBase(
"upload-sarif",
"failure",
startedAt,
error.message,
error.stack
)
);
return;
}
}
run().catch(e => {
core.setFailed("codeql/upload-sarif action failed: " + e);
run().catch((e) => {
core.setFailed(`codeql/upload-sarif action failed: ${e}`);
console.log(e);
});


@@ -1,21 +1,23 @@
import test from 'ava';
import * as fs from 'fs';
import test from "ava";
import * as fs from "fs";
import * as os from "os";
import { getRunnerLogger } from './logging';
import {setupTests} from './testing-utils';
import * as util from './util';
import { getRunnerLogger } from "./logging";
import { setupTests } from "./testing-utils";
import * as util from "./util";
setupTests(test);
test('getToolNames', t => {
const input = fs.readFileSync(__dirname + '/../src/testdata/tool-names.sarif', 'utf8');
test("getToolNames", (t) => {
const input = fs.readFileSync(
`${__dirname}/../src/testdata/tool-names.sarif`,
"utf8"
);
const toolNames = util.getToolNames(input);
t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
});
test('getMemoryFlag() should return the correct --ram flag', t => {
test("getMemoryFlag() should return the correct --ram flag", (t) => {
const totalMem = Math.floor(os.totalmem() / (1024 * 1024));
const tests = {
@@ -29,14 +31,13 @@ test('getMemoryFlag() should return the correct --ram flag', t => {
}
});
test('getMemoryFlag() throws if the ram input is < 0 or NaN', t => {
test("getMemoryFlag() throws if the ram input is < 0 or NaN", (t) => {
for (const input of ["-1", "hello!"]) {
t.throws(() => util.getMemoryFlag(input));
}
});
test('getAddSnippetsFlag() should return the correct flag', t => {
test("getAddSnippetsFlag() should return the correct flag", (t) => {
t.deepEqual(util.getAddSnippetsFlag(true), "--sarif-add-snippets");
t.deepEqual(util.getAddSnippetsFlag("true"), "--sarif-add-snippets");
@@ -44,18 +45,16 @@ test('getAddSnippetsFlag() should return the correct flag', t => {
t.deepEqual(util.getAddSnippetsFlag(undefined), "--no-sarif-add-snippets");
t.deepEqual(util.getAddSnippetsFlag("false"), "--no-sarif-add-snippets");
t.deepEqual(util.getAddSnippetsFlag("foo bar"), "--no-sarif-add-snippets");
});
test('getThreadsFlag() should return the correct --threads flag', t => {
test("getThreadsFlag() should return the correct --threads flag", (t) => {
const numCpus = os.cpus().length;
const tests = {
"0": "--threads=0",
"1": "--threads=1",
[`${numCpus + 1}`]: `--threads=${numCpus}`,
[`${-numCpus - 1}`]: `--threads=${-numCpus}`
[`${-numCpus - 1}`]: `--threads=${-numCpus}`,
};
for (const [input, expectedFlag] of Object.entries(tests)) {
@@ -64,68 +63,68 @@ test('getThreadsFlag() should return the correct --threads flag', t => {
}
});
test('getThreadsFlag() throws if the threads input is not an integer', t => {
test("getThreadsFlag() throws if the threads input is not an integer", (t) => {
t.throws(() => util.getThreadsFlag("hello!", getRunnerLogger(true)));
});
test('getRef() throws on the empty string', t => {
test("getRef() throws on the empty string", (t) => {
process.env["GITHUB_REF"] = "";
t.throws(util.getRef);
});
test('isLocalRun() runs correctly', t => {
test("isLocalRun() runs correctly", (t) => {
const origLocalRun = process.env.CODEQL_LOCAL_RUN;
process.env.CODEQL_LOCAL_RUN = '';
process.env.CODEQL_LOCAL_RUN = "";
t.assert(!util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = 'false';
process.env.CODEQL_LOCAL_RUN = "false";
t.assert(!util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = '0';
process.env.CODEQL_LOCAL_RUN = "0";
t.assert(!util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = 'true';
process.env.CODEQL_LOCAL_RUN = "true";
t.assert(util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = 'hucairz';
process.env.CODEQL_LOCAL_RUN = "hucairz";
t.assert(util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = origLocalRun;
});
test('prepareEnvironment() when a local run', t => {
test("prepareEnvironment() when a local run", (t) => {
const origLocalRun = process.env.CODEQL_LOCAL_RUN;
process.env.CODEQL_LOCAL_RUN = 'false';
process.env.GITHUB_JOB = 'YYY';
process.env.CODEQL_LOCAL_RUN = "false";
process.env.GITHUB_JOB = "YYY";
util.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, 'YYY');
t.deepEqual(process.env.GITHUB_JOB, "YYY");
process.env.CODEQL_LOCAL_RUN = 'true';
process.env.CODEQL_LOCAL_RUN = "true";
util.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, 'YYY');
t.deepEqual(process.env.GITHUB_JOB, "YYY");
process.env.GITHUB_JOB = '';
process.env.GITHUB_JOB = "";
util.prepareLocalRunEnvironment();
// updated
t.deepEqual(process.env.GITHUB_JOB, 'UNKNOWN-JOB');
t.deepEqual(process.env.GITHUB_JOB, "UNKNOWN-JOB");
process.env.CODEQL_LOCAL_RUN = origLocalRun;
});
test('getExtraOptionsEnvParam() succeeds on valid JSON with invalid options (for now)', t => {
test("getExtraOptionsEnvParam() succeeds on valid JSON with invalid options (for now)", (t) => {
const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
const options = {foo: 42};
const options = { foo: 42 };
process.env.CODEQL_ACTION_EXTRA_OPTIONS = JSON.stringify(options);
@@ -134,20 +133,18 @@ test('getExtraOptionsEnvParam() succeeds on valid JSON with invalid options (for
process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
});
test('getExtraOptionsEnvParam() succeeds on valid options', t => {
test("getExtraOptionsEnvParam() succeeds on valid options", (t) => {
const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
const options = { database: { init: ["--debug"] } };
process.env.CODEQL_ACTION_EXTRA_OPTIONS =
JSON.stringify(options);
process.env.CODEQL_ACTION_EXTRA_OPTIONS = JSON.stringify(options);
t.deepEqual(util.getExtraOptionsEnvParam(), options);
process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
});
test('getExtraOptionsEnvParam() fails on invalid JSON', t => {
test("getExtraOptionsEnvParam() fails on invalid JSON", (t) => {
const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
process.env.CODEQL_ACTION_EXTRA_OPTIONS = "{{invalid-json}}";


@@ -1,34 +1,33 @@
import * as core from '@actions/core';
import * as toolrunnner from '@actions/exec/lib/toolrunner';
import * as core from "@actions/core";
import * as toolrunnner from "@actions/exec/lib/toolrunner";
import * as fs from "fs";
import * as os from 'os';
import * as path from 'path';
import * as os from "os";
import * as path from "path";
import * as api from './api-client';
import { Language } from './languages';
import { Logger } from './logging';
import * as sharedEnv from './shared-environment';
import * as api from "./api-client";
import { Language } from "./languages";
import { Logger } from "./logging";
import * as sharedEnv from "./shared-environment";
/**
* Are we running on actions, or not.
*/
export type Mode = 'actions' | 'runner';
export type Mode = "actions" | "runner";
/**
* The URL for github.com.
*/
export const GITHUB_DOTCOM_URL = "https://github.com";
/**
* Get an environment parameter, but throw an error if it is not set.
*/
export function getRequiredEnvParam(paramName: string): string {
const value = process.env[paramName];
if (value === undefined || value.length === 0) {
throw new Error(paramName + ' environment variable must be set');
throw new Error(`${paramName} environment variable must be set`);
}
core.debug(paramName + '=' + value);
core.debug(`${paramName}=${value}`);
return value;
}
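A minimal usage sketch (illustrative, not part of this commit) of the behaviour documented above; the "./util" module path is assumed from the test file earlier in this diff:

import { getRequiredEnvParam } from "./util"; // module path assumed from the tests above

process.env["GITHUB_REPOSITORY"] = "octo-org/octo-repo"; // hypothetical value
console.log(getRequiredEnvParam("GITHUB_REPOSITORY")); // "octo-org/octo-repo"

// An unset (or empty) variable throws:
//   Error: SOME_OTHER_VARIABLE environment variable must be set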
@@ -36,7 +35,7 @@ export function getRequiredEnvParam(paramName: string): string {
* Get the extra options for the codeql commands.
*/
export function getExtraOptionsEnvParam(): object {
const varName = 'CODEQL_ACTION_EXTRA_OPTIONS';
const varName = "CODEQL_ACTION_EXTRA_OPTIONS";
const raw = process.env[varName];
if (raw === undefined || raw.length === 0) {
return {};
@@ -45,17 +44,17 @@ export function getExtraOptionsEnvParam(): object {
return JSON.parse(raw);
} catch (e) {
throw new Error(
varName +
' environment variable is set, but does not contain valid JSON: ' +
e.message
`${varName} environment variable is set, but does not contain valid JSON: ${e.message}`
);
}
}
export function isLocalRun(): boolean {
return !!process.env.CODEQL_LOCAL_RUN
&& process.env.CODEQL_LOCAL_RUN !== 'false'
&& process.env.CODEQL_LOCAL_RUN !== '0';
return (
!!process.env.CODEQL_LOCAL_RUN &&
process.env.CODEQL_LOCAL_RUN !== "false" &&
process.env.CODEQL_LOCAL_RUN !== "0"
);
}
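For reference, a small sketch (illustrative only) mirroring the isLocalRun() expectations in the test file earlier in this diff:

import * as util from "./util"; // module path assumed from the tests above

for (const value of ["", "false", "0", "true", "hucairz"]) {
  process.env.CODEQL_LOCAL_RUN = value;
  // "", "false" and "0" are treated as not local; any other non-empty
  // value (even an arbitrary string) counts as a local run.
  console.log(JSON.stringify(value), util.isLocalRun());
}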
/**
@@ -66,9 +65,9 @@ export function prepareLocalRunEnvironment() {
return;
}
core.debug('Action is running locally.');
core.debug("Action is running locally.");
if (!process.env.GITHUB_JOB) {
core.exportVariable('GITHUB_JOB', 'UNKNOWN-JOB');
core.exportVariable("GITHUB_JOB", "UNKNOWN-JOB");
}
}
@@ -84,18 +83,24 @@ export async function getCommitOid(): Promise<string> {
// Even if this does go wrong, it's not a huge problem for the alerts to
// be reported on the merge commit.
try {
let commitOid = '';
await new toolrunnner.ToolRunner('git', ['rev-parse', 'HEAD'], {
let commitOid = "";
await new toolrunnner.ToolRunner("git", ["rev-parse", "HEAD"], {
silent: true,
listeners: {
stdout: (data) => { commitOid += data.toString(); },
stderr: (data) => { process.stderr.write(data); }
}
stdout: (data) => {
commitOid += data.toString();
},
stderr: (data) => {
process.stderr.write(data);
},
},
}).exec();
return commitOid.trim();
} catch (e) {
core.info("Failed to call git to get current commit. Continuing with data from environment: " + e);
return getRequiredEnvParam('GITHUB_SHA');
core.info(
`Failed to call git to get current commit. Continuing with data from environment: ${e}`
);
return getRequiredEnvParam("GITHUB_SHA");
}
}
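An illustrative call site (not part of this diff), assuming it runs inside a checked-out repository so that "git rev-parse HEAD" succeeds; otherwise the GITHUB_SHA fallback above kicks in:

import { getCommitOid } from "./util"; // module path assumed from the tests above

async function logCommit() {
  // Prefers the output of git rev-parse HEAD; falls back to GITHUB_SHA if git fails.
  const sha = await getCommitOid();
  console.log(`Analysing commit ${sha}`);
}

void logCommit();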
@@ -103,20 +108,23 @@ export async function getCommitOid(): Promise<string> {
* Get the path of the currently executing workflow.
*/
async function getWorkflowPath(): Promise<string> {
const repo_nwo = getRequiredEnvParam('GITHUB_REPOSITORY').split("/");
const repo_nwo = getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
const owner = repo_nwo[0];
const repo = repo_nwo[1];
const run_id = Number(getRequiredEnvParam('GITHUB_RUN_ID'));
const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID"));
const apiClient = api.getActionsApiClient();
const runsResponse = await apiClient.request('GET /repos/:owner/:repo/actions/runs/:run_id', {
owner,
repo,
run_id
});
const runsResponse = await apiClient.request(
"GET /repos/:owner/:repo/actions/runs/:run_id",
{
owner,
repo,
run_id,
}
);
const workflowUrl = runsResponse.data.workflow_url;
const workflowResponse = await apiClient.request('GET ' + workflowUrl);
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
return workflowResponse.data.path;
}
@@ -125,9 +133,9 @@ async function getWorkflowPath(): Promise<string> {
* Get the workflow run ID.
*/
export function getWorkflowRunID(): number {
const workflowRunID = parseInt(getRequiredEnvParam('GITHUB_RUN_ID'), 10);
const workflowRunID = parseInt(getRequiredEnvParam("GITHUB_RUN_ID"), 10);
if (Number.isNaN(workflowRunID)) {
throw new Error('GITHUB_RUN_ID must define a non NaN workflow run ID');
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
}
return workflowRunID;
}
@@ -140,7 +148,7 @@ export function getWorkflowRunID(): number {
* the github API, but after that the result will be cached.
*/
export async function getAnalysisKey(): Promise<string> {
const analysisKeyEnvVar = 'CODEQL_ACTION_ANALYSIS_KEY';
const analysisKeyEnvVar = "CODEQL_ACTION_ANALYSIS_KEY";
let analysisKey = process.env[analysisKeyEnvVar];
if (analysisKey !== undefined) {
@@ -148,9 +156,9 @@ export async function getAnalysisKey(): Promise<string> {
}
const workflowPath = await getWorkflowPath();
const jobName = getRequiredEnvParam('GITHUB_JOB');
const jobName = getRequiredEnvParam("GITHUB_JOB");
analysisKey = workflowPath + ':' + jobName;
analysisKey = `${workflowPath}:${jobName}`;
core.exportVariable(analysisKeyEnvVar, analysisKey);
return analysisKey;
}
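A hypothetical illustration (workflow path and job name invented) of the key format produced above, with the result cached in CODEQL_ACTION_ANALYSIS_KEY between calls:

import { getAnalysisKey } from "./util"; // module path assumed from the tests above

// Inside a workflow run (GITHUB_REPOSITORY, GITHUB_RUN_ID, GITHUB_JOB and an
// API token available), a workflow at ".github/workflows/codeql-analysis.yml"
// with a job named "analyze" would yield:
//   ".github/workflows/codeql-analysis.yml:analyze"
async function logAnalysisKey() {
  console.log(await getAnalysisKey());
}

void logAnalysisKey();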
@@ -161,7 +169,7 @@ export async function getAnalysisKey(): Promise<string> {
export function getRef(): string {
// Will be in the form "refs/heads/master" on a push event
// or in the form "refs/pull/N/merge" on a pull_request event
const ref = getRequiredEnvParam('GITHUB_REF');
const ref = getRequiredEnvParam("GITHUB_REF");
// For pull request refs we want to convert from the 'merge' ref
// to the 'head' ref, as that is what we want to analyse.
@@ -169,46 +177,46 @@ export function getRef(): string {
// the checkout, but we have no way of verifying that here.
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
if (pull_ref_regex.test(ref)) {
return ref.replace(pull_ref_regex, 'refs/pull/$1/head');
return ref.replace(pull_ref_regex, "refs/pull/$1/head");
} else {
return ref;
}
}
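An illustrative sketch (ref values invented) of the merge-to-head rewriting described above:

import { getRef } from "./util"; // module path assumed from the tests above

// Push event: branch refs are returned unchanged.
process.env["GITHUB_REF"] = "refs/heads/main";
console.log(getRef()); // "refs/heads/main"

// Pull request event: the 'merge' ref is rewritten to the 'head' ref.
process.env["GITHUB_REF"] = "refs/pull/123/merge";
console.log(getRef()); // "refs/pull/123/head"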
type ActionName = 'init' | 'autobuild' | 'finish' | 'upload-sarif';
type ActionStatus = 'starting' | 'aborted' | 'success' | 'failure';
type ActionName = "init" | "autobuild" | "finish" | "upload-sarif";
type ActionStatus = "starting" | "aborted" | "success" | "failure";
export interface StatusReportBase {
// ID of the workflow run containing the action run
"workflow_run_id": number;
workflow_run_id: number;
// Workflow name. Converted to analysis_name further down the pipeline.
"workflow_name": string;
workflow_name: string;
// Job name from the workflow
"job_name": string;
job_name: string;
// Analysis key, normally composed from the workflow path and job name
"analysis_key": string;
analysis_key: string;
// Value of the matrix for this instantiation of the job
"matrix_vars"?: string;
matrix_vars?: string;
// Commit oid that the workflow was triggered on
"commit_oid": string;
commit_oid: string;
// Ref that the workflow was triggered on
"ref": string;
ref: string;
// Name of the action being executed
"action_name": ActionName;
action_name: ActionName;
// Version of the action being executed, as a commit oid
"action_oid": string;
action_oid: string;
// Time the first action started. Normally the init action
"started_at": string;
started_at: string;
// Time this action started
"action_started_at": string;
action_started_at: string;
// Time this action completed, or undefined if not yet completed
"completed_at"?: string;
completed_at?: string;
// State this action is currently in
"status": ActionStatus;
status: ActionStatus;
// Cause of the failure (or undefined if status is not failure)
"cause"?: string;
cause?: string;
// Stack trace of the failure (or undefined if status is not failure)
"exception"?: string;
exception?: string;
}
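For context, a hypothetical StatusReportBase value (every field value below is invented) showing the shape the now-unquoted keys describe:

import { StatusReportBase } from "./util"; // module path assumed from the tests above

const exampleReport: StatusReportBase = {
  workflow_run_id: 42,
  workflow_name: "CodeQL",
  job_name: "analyze",
  analysis_key: ".github/workflows/codeql-analysis.yml:analyze",
  commit_oid: "0123456789abcdef0123456789abcdef01234567",
  ref: "refs/heads/main",
  action_name: "init",
  action_oid: "unknown",
  started_at: new Date().toISOString(),
  action_started_at: new Date().toISOString(),
  status: "starting",
};
console.log(exampleReport);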
/**
@@ -225,37 +233,39 @@ export async function createStatusReportBase(
status: ActionStatus,
actionStartedAt: Date,
cause?: string,
exception?: string):
Promise<StatusReportBase> {
const commitOid = process.env['GITHUB_SHA'] || '';
exception?: string
): Promise<StatusReportBase> {
const commitOid = process.env["GITHUB_SHA"] || "";
const ref = getRef();
const workflowRunIDStr = process.env['GITHUB_RUN_ID'];
const workflowRunIDStr = process.env["GITHUB_RUN_ID"];
let workflowRunID = -1;
if (workflowRunIDStr) {
workflowRunID = parseInt(workflowRunIDStr, 10);
}
const workflowName = process.env['GITHUB_WORKFLOW'] || '';
const jobName = process.env['GITHUB_JOB'] || '';
const workflowName = process.env["GITHUB_WORKFLOW"] || "";
const jobName = process.env["GITHUB_JOB"] || "";
const analysis_key = await getAnalysisKey();
let workflowStartedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
if (workflowStartedAt === undefined) {
workflowStartedAt = actionStartedAt.toISOString();
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
core.exportVariable(
sharedEnv.CODEQL_WORKFLOW_STARTED_AT,
workflowStartedAt
);
}
let statusReport: StatusReportBase = {
const statusReport: StatusReportBase = {
workflow_run_id: workflowRunID,
workflow_name: workflowName,
job_name: jobName,
analysis_key: analysis_key,
analysis_key,
commit_oid: commitOid,
ref: ref,
ref,
action_name: actionName,
action_oid: "unknown", // TODO decide if it's possible to fill this in
started_at: workflowStartedAt,
action_started_at: actionStartedAt.toISOString(),
status: status
status,
};
// Add optional parameters
@@ -265,10 +275,10 @@ export async function createStatusReportBase(
if (exception) {
statusReport.exception = exception;
}
if (status === 'success' || status === 'failure' || status === 'aborted') {
if (status === "success" || status === "failure" || status === "aborted") {
statusReport.completed_at = new Date().toISOString();
}
let matrix: string | undefined = core.getInput('matrix');
const matrix: string | undefined = core.getInput("matrix");
if (matrix) {
statusReport.matrix_vars = matrix;
}
@@ -287,8 +297,8 @@ export async function createStatusReportBase(
*/
export async function sendStatusReport<S extends StatusReportBase>(
statusReport: S,
ignoreFailures?: boolean): Promise<boolean> {
ignoreFailures?: boolean
): Promise<boolean> {
if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) {
core.debug("Not sending status report to GitHub Enterprise");
return true;
@@ -300,16 +310,19 @@ export async function sendStatusReport<S extends StatusReportBase>(
}
const statusReportJSON = JSON.stringify(statusReport);
core.debug('Sending status report: ' + statusReportJSON);
core.debug(`Sending status report: ${statusReportJSON}`);
const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const client = api.getActionsApiClient();
const statusResponse = await client.request('PUT /repos/:owner/:repo/code-scanning/analysis/status', {
owner: owner,
repo: repo,
data: statusReportJSON,
});
const statusResponse = await client.request(
"PUT /repos/:owner/:repo/code-scanning/analysis/status",
{
owner,
repo,
data: statusReportJSON,
}
);
if (!ignoreFailures) {
// If the status report request fails with a 403 or a 404, then this is a deliberate
@@ -319,11 +332,15 @@ export async function sendStatusReport<S extends StatusReportBase>(
// Other failure responses (or lack thereof) could be transitory and should not
// cause the action to fail.
if (statusResponse.status === 403) {
core.setFailed('The repo on which this action is running is not opted-in to CodeQL code scanning.');
core.setFailed(
"The repo on which this action is running is not opted-in to CodeQL code scanning."
);
return false;
}
if (statusResponse.status === 404) {
core.setFailed('Not authorized to use the CodeQL code scanning feature on this repo.');
core.setFailed(
"Not authorized to use the CodeQL code scanning feature on this repo."
);
return false;
}
}
@@ -353,12 +370,14 @@ export function getToolNames(sarifContents: string): string[] {
// Creates a random temporary directory, runs the given body, and then deletes the directory.
// Mostly intended for use within tests.
export async function withTmpDir<T>(body: (tmpDir: string) => Promise<T>): Promise<T> {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
const realSubdir = path.join(tmpDir, 'real');
export async function withTmpDir<T>(
body: (tmpDir: string) => Promise<T>
): Promise<T> {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "codeql-action-"));
const realSubdir = path.join(tmpDir, "real");
fs.mkdirSync(realSubdir);
const symlinkSubdir = path.join(tmpDir, 'symlink');
fs.symlinkSync(realSubdir, symlinkSubdir, 'dir');
const symlinkSubdir = path.join(tmpDir, "symlink");
fs.symlinkSync(realSubdir, symlinkSubdir, "dir");
const result = await body(symlinkSubdir);
fs.rmdirSync(tmpDir, { recursive: true });
return result;
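A usage sketch (illustrative only) of withTmpDir() as reformatted above: the body receives a temporary directory that is removed after the body completes:

import * as fs from "fs";
import * as path from "path";
import { withTmpDir } from "./util"; // module path assumed from the tests above

async function demo() {
  const contents = await withTmpDir(async (tmpDir) => {
    const file = path.join(tmpDir, "example.txt"); // hypothetical file name
    fs.writeFileSync(file, "hello");
    return fs.readFileSync(file, "utf8");
  });
  console.log(contents); // "hello"; the temporary directory has been deleted
}

void demo();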
@@ -375,7 +394,7 @@ export function getMemoryFlag(userInput: string | undefined): string {
if (userInput) {
memoryToUseMegaBytes = Number(userInput);
if (Number.isNaN(memoryToUseMegaBytes) || memoryToUseMegaBytes <= 0) {
throw new Error("Invalid RAM setting \"" + userInput + "\", specified.");
throw new Error(`Invalid RAM setting "${userInput}", specified.`);
}
} else {
const totalMemoryBytes = os.totalmem();
@@ -383,7 +402,7 @@ export function getMemoryFlag(userInput: string | undefined): string {
const systemReservedMemoryMegaBytes = 256;
memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
}
return "--ram=" + Math.floor(memoryToUseMegaBytes);
return `--ram=${Math.floor(memoryToUseMegaBytes)}`;
}
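An illustrative check (not part of this diff) of the flag construction above; the no-input case depends on the machine's total memory, so only its shape is sketched:

import { getMemoryFlag } from "./util"; // module path assumed from the tests above

console.log(getMemoryFlag("4096")); // "--ram=4096"
console.log(getMemoryFlag(undefined)); // e.g. "--ram=<total MB minus 256>"
// Non-numeric or non-positive input throws, e.g. getMemoryFlag("banana").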
/**
@@ -391,12 +410,14 @@ export function getMemoryFlag(userInput: string | undefined): string {
*
* @returns string
*/
export function getAddSnippetsFlag(userInput: string | boolean | undefined): string {
export function getAddSnippetsFlag(
userInput: string | boolean | undefined
): string {
if (typeof userInput === "string") {
// have to process specifically because any non-empty string is truthy
userInput = userInput.toLowerCase() === "true";
}
return userInput ? "--sarif-add-snippets" : "--no-sarif-add-snippets";
return userInput ? "--sarif-add-snippets" : "--no-sarif-add-snippets";
}
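A brief sketch (illustrative) of the string and boolean handling above:

import { getAddSnippetsFlag } from "./util"; // module path assumed from the tests above

console.log(getAddSnippetsFlag(true)); // "--sarif-add-snippets"
console.log(getAddSnippetsFlag("TRUE")); // "--sarif-add-snippets" (string compare is case-insensitive)
console.log(getAddSnippetsFlag("false")); // "--no-sarif-add-snippets"
console.log(getAddSnippetsFlag(undefined)); // "--no-sarif-add-snippets"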
/**
@@ -407,7 +428,10 @@ export function getAddSnippetsFlag(userInput: string | boolean | undefined): str
*
* @returns string
*/
export function getThreadsFlag(userInput: string | undefined, logger: Logger): string {
export function getThreadsFlag(
userInput: string | undefined,
logger: Logger
): string {
let numThreads: number;
const maxThreads = os.cpus().length;
if (userInput) {
@@ -416,12 +440,16 @@ export function getThreadsFlag(userInput: string | undefined, logger: Logger): s
throw new Error(`Invalid threads setting "${userInput}", specified.`);
}
if (numThreads > maxThreads) {
logger.info(`Clamping desired number of threads (${numThreads}) to max available (${maxThreads}).`);
logger.info(
`Clamping desired number of threads (${numThreads}) to max available (${maxThreads}).`
);
numThreads = maxThreads;
}
const minThreads = -maxThreads;
if (numThreads < minThreads) {
logger.info(`Clamping desired number of free threads (${numThreads}) to max available (${minThreads}).`);
logger.info(
`Clamping desired number of free threads (${numThreads}) to max available (${minThreads}).`
);
numThreads = minThreads;
}
} else {
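To make the thread clamping above concrete, a hedged sketch assuming a 2-core machine; getRunnerLogger and both module paths are borrowed from the test file earlier in this diff:

import { getRunnerLogger } from "./logging"; // as used by the tests above
import { getThreadsFlag } from "./util"; // module path assumed from the tests above

const logger = getRunnerLogger(true);
// On a hypothetical 2-core machine (os.cpus().length === 2):
console.log(getThreadsFlag("1", logger)); // keeps the requested 1 thread
console.log(getThreadsFlag("16", logger)); // logs an info message and clamps to 2
console.log(getThreadsFlag("-16", logger)); // logs an info message and clamps to -2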
@@ -435,7 +463,7 @@ export function getThreadsFlag(userInput: string | undefined, logger: Logger): s
* Get the directory where CodeQL databases should be placed.
*/
export function getCodeQLDatabasesDir(tempDir: string) {
return path.resolve(tempDir, 'codeql_databases');
return path.resolve(tempDir, "codeql_databases");
}
/**