Compare commits


2 Commits

Author           SHA1        Message               Date
David Verdeguer  807be914ae  Test config examples  2020-05-17 22:51:10 +02:00
David Verdeguer  6a82723ea1  Add examples files    2020-05-17 10:32:45 +02:00
27 changed files with 178 additions and 751 deletions


@@ -13,5 +13,6 @@ jobs:
     - uses: actions/checkout@v1
     - uses: ./init
       with:
+        languages: javascript
         config-file: ./.github/codeql/codeql-config.yml
     - uses: ./analyze
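
The hunk above adds a `languages` input to the `init` step of a workflow. For context, a complete job wired up this way might look roughly like the sketch below; the trigger, job id and runner are assumptions, since the diff only shows the step list.

on: [push]

jobs:
  codeql:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v1
    - uses: ./init
      with:
        languages: javascript
        config-file: ./.github/codeql/codeql-config.yml
    - uses: ./analyze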


@@ -84,7 +84,7 @@ The CodeQL action should be run on `push` events, and on a `schedule`. `Push` ev
 ### Configuration
-You may optionally specify additional queries for CodeQL to execute by using a config file. The queries must belong to a [QL pack](https://help.semmle.com/codeql/codeql-cli/reference/qlpack-overview.html) and can be in your repository or any public repository. You can choose a single .ql file, a folder containing multiple .ql files, a .qls [query suite](https://help.semmle.com/codeql/codeql-cli/procedures/query-suites.html) file, or any combination of the above. To use queries stored in your repository or from other repositories use the same syntax as when [using an action](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepsuses). Note that when using local queries starting with `./`, the path is relative to the root of the repository and not to the location of the config file.
+You may optionally specify additional queries for CodeQL to execute by using a config file. The queries must belong to a [QL pack](https://help.semmle.com/codeql/codeql-cli/reference/qlpack-overview.html) and can be in your repository or any public repository. You can choose a single .ql file, a folder containing multiple .ql files, a .qls [query suite](https://help.semmle.com/codeql/codeql-cli/procedures/query-suites.html) file, or any combination of the above. To use queries from other repositories use the same syntax as when [using an action](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepsuses).
 You can disable the default queries using `disable-default-queries: true`.
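
A config file combining the documented options might look roughly like this; the name, paths and query references are illustrative, drawn from other files in this comparison rather than from the README itself.

name: My CodeQL config
disable-default-queries: true
queries:
  - name: Queries from a QL pack in this repository
    uses: ./codeql-qlpacks/java-qlpack
  - name: A query suite from another repository
    uses: github/codeql/cpp/ql/src/codeql-suites/cpp-lgtm.qls@master
paths-ignore:
  - node_modules
paths:
  - src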


@@ -12,9 +12,6 @@ inputs:
     description: Upload the SARIF file
     required: false
     default: true
-  ram:
-    description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
-    required: false
   token:
     default: ${{ github.token }}
   matrix:
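
For reference, on the side of this comparison that still defines the `ram` input, a workflow could cap the analysis memory roughly like this; the step path and the value are illustrative.

    - uses: ./analyze
      with:
        ram: 4096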


@@ -0,0 +1,4 @@
+name: extended-cpp-queryset
+queries:
+  - name: Additional C++ queries
+    uses: github/codeql/cpp/ql/src/codeql-suites/cpp-lgtm.qls@master


@@ -0,0 +1,4 @@
+name: extended-csharp-queryset
+queries:
+  - name: Additional C# queries
+    uses: github/codeql/csharp/ql/src/codeql-suites/csharp-lgtm.qls@master


@@ -0,0 +1,4 @@
+name: extended-go-queryset
+queries:
+  - name: Additional Go queries
+    uses: github/codeql-go/ql/src/codeql-suites/go-lgtm.qls@master


@@ -0,0 +1,4 @@
+name: extended-java-queryset
+queries:
+  - name: Additional Java queries
+    uses: github/codeql/java/ql/src/codeql-suites/java-lgtm.qls@master


@@ -0,0 +1,4 @@
+name: extended-javascript-queryset
+queries:
+  - name: Additional Javascript queries
+    uses: github/codeql/javascript/ql/src/codeql-suites/javascript-lgtm.qls@master


@@ -0,0 +1,4 @@
+name: extended-python-queryset
+queries:
+  - name: Additional Python queries
+    uses: github/codeql/python/ql/src/codeql-suites/python-lgtm.qls@master
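
These example files follow the same structure as the action's config file, so a workflow would presumably point the `init` step's `config-file` input at one of them; the directory and file name below are assumptions, since the compare view does not show the paths of these new files.

    - uses: ./init
      with:
        config-file: ./examples/extended-python-queryset.yml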


@@ -8,7 +8,7 @@ inputs:
     default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200427/codeql-bundle.tar.gz
   languages:
     description: The languages to be analysed
-    required: true
+    required: false
   token:
     default: ${{ github.token }}
   config-file:

lib/config-utils.js (generated)

@@ -12,7 +12,6 @@ const io = __importStar(require("@actions/io"));
 const fs = __importStar(require("fs"));
 const yaml = __importStar(require("js-yaml"));
 const path = __importStar(require("path"));
-const util = __importStar(require("./util"));
 class ExternalQuery {
     constructor(repository, ref) {
         this.path = '';
@@ -33,33 +32,27 @@ class Config {
     addQuery(queryUses) {
         // The logic for parsing the string is based on what actions does for
         // parsing the 'uses' actions in the workflow file
-        queryUses = queryUses.trim();
         if (queryUses === "") {
-            throw new Error(getQueryUsesBlank());
+            throw '"uses" value for queries cannot be blank';
         }
-        // Check for the local path case before we start trying to parse the repository name
         if (queryUses.startsWith("./")) {
             this.additionalQueries.push(queryUses.slice(2));
             return;
         }
         let tok = queryUses.split('@');
         if (tok.length !== 2) {
-            throw new Error(getQueryUsesIncorrect(queryUses));
+            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
         }
         const ref = tok[1];
         tok = tok[0].split('/');
         // The first token is the owner
         // The second token is the repo
         // The rest is a path, if there is more than one token combine them to form the full path
-        if (tok.length < 2) {
-            throw new Error(getQueryUsesIncorrect(queryUses));
-        }
         if (tok.length > 3) {
             tok = [tok[0], tok[1], tok.slice(2).join('/')];
         }
-        // Check none of the parts of the repository name are empty
-        if (tok[0].trim() === '' || tok[1].trim() === '') {
-            throw new Error(getQueryUsesIncorrect(queryUses));
+        if (tok.length < 2) {
+            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
         }
         let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
         if (tok.length === 3) {
@@ -69,41 +62,16 @@ class Config {
     }
 }
 exports.Config = Config;
-function getQueryUsesBlank() {
-    return '"uses" value for queries cannot be blank';
-}
-exports.getQueryUsesBlank = getQueryUsesBlank;
-function getQueryUsesIncorrect(queryUses) {
-    return '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
-}
-exports.getQueryUsesIncorrect = getQueryUsesIncorrect;
-function getConfigFileOutsideWorkspaceErrorMessage(configFile) {
-    return 'The configuration file "' + configFile + '" is outside of the workspace';
-}
-exports.getConfigFileOutsideWorkspaceErrorMessage = getConfigFileOutsideWorkspaceErrorMessage;
-function getConfigFileDoesNotExistErrorMessage(configFile) {
-    return 'The configuration file "' + configFile + '" does not exist';
-}
-exports.getConfigFileDoesNotExistErrorMessage = getConfigFileDoesNotExistErrorMessage;
+const configFolder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
 function initConfig() {
-    let configFile = core.getInput('config-file');
+    const configFile = core.getInput('config-file');
     const config = new Config();
     // If no config file was provided create an empty one
     if (configFile === '') {
         core.debug('No configuration file was provided');
         return config;
     }
-    // Treat the config file as relative to the workspace
-    const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
-    configFile = path.resolve(workspacePath, configFile);
-    // Error if the config file is now outside of the workspace
-    if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
-        throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
-    }
-    // Error if the file does not exist
-    if (!fs.existsSync(configFile)) {
-        throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
-    }
+    try {
     const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
     if (parsedYAML.name && typeof parsedYAML.name === "string") {
         config.name = parsedYAML.name;
@@ -114,7 +82,7 @@ function initConfig() {
     const queries = parsedYAML.queries;
     if (queries && queries instanceof Array) {
         queries.forEach(query => {
-            if (typeof query.uses === "string") {
+            if (query.uses && typeof query.uses === "string") {
                 config.addQuery(query.uses);
             }
         });
@@ -135,24 +103,21 @@ function initConfig() {
             }
         });
     }
+    }
+    catch (err) {
+        core.setFailed(err);
+    }
     return config;
 }
-function getConfigFolder() {
-    return util.getRequiredEnvParam('RUNNER_WORKSPACE');
-}
-function getConfigFile() {
-    return path.join(getConfigFolder(), 'config');
-}
-exports.getConfigFile = getConfigFile;
 async function saveConfig(config) {
     const configString = JSON.stringify(config);
-    await io.mkdirP(getConfigFolder());
-    fs.writeFileSync(getConfigFile(), configString, 'utf8');
+    await io.mkdirP(configFolder);
+    fs.writeFileSync(path.join(configFolder, 'config'), configString, 'utf8');
     core.debug('Saved config:');
     core.debug(configString);
 }
 async function loadConfig() {
-    const configFile = getConfigFile();
+    const configFile = path.join(configFolder, 'config');
     if (fs.existsSync(configFile)) {
         const configString = fs.readFileSync(configFile, 'utf8');
         core.debug('Loaded config:');

File diff suppressed because one or more lines are too long

lib/config-utils.test.js (generated)

@@ -1,224 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const configUtils = __importStar(require("./config-utils"));
const util = __importStar(require("./util"));
function setInput(name, value) {
// Transformation copied from
// https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
if (value !== undefined) {
process.env[envVar] = value;
}
else {
delete process.env[envVar];
}
}
ava_1.default("load empty config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', undefined);
const config = await configUtils.loadConfig();
t.deepEqual(config, new configUtils.Config());
});
});
ava_1.default("loading config saves config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const configFile = configUtils.getConfigFile();
// Sanity check the saved config file does not already exist
t.false(fs.existsSync(configFile));
const config = await configUtils.loadConfig();
// The saved config file should now exist
t.true(fs.existsSync(configFile));
// And the contents should parse correctly to the config that was returned
t.deepEqual(fs.readFileSync(configFile, 'utf8'), JSON.stringify(config));
});
});
ava_1.default("load input outside of workspace", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', '../input');
try {
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
}
});
});
ava_1.default("load non-existent input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
t.false(fs.existsSync(path.join(tmpDir, 'input')));
setInput('config-file', 'input');
try {
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
}
});
});
ava_1.default("load non-empty input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// Just create a generic config object with non-default values for all fields
const inputFileContents = `
name: my config
disable-default-queries: true
queries:
- uses: ./foo
- uses: foo/bar@dev
paths-ignore:
- a
- b
paths:
- c/d`;
// And the config we expect it to parse to
const expectedConfig = new configUtils.Config();
expectedConfig.name = 'my config';
expectedConfig.disableDefaultQueries = true;
expectedConfig.additionalQueries.push('foo');
expectedConfig.externalQueries = [new configUtils.ExternalQuery('foo/bar', 'dev')];
expectedConfig.pathsIgnore = ['a', 'b'];
expectedConfig.paths = ['c/d'];
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
const actualConfig = await configUtils.loadConfig();
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
});
ava_1.default("load partially invalid input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// The valid parts of this config should be parsed correctly.
// The invalid parts should be ignored and left as the default values.
const inputFileContents = `
name:
- foo: bar
disable-default-queries: 42
queries:
- name: foo/bar
uses: foo/bar@dev
paths-ignore:
- a
- b
paths:
- c/d`;
// And the config we expect it to parse to
const expectedConfig = new configUtils.Config();
expectedConfig.externalQueries = [new configUtils.ExternalQuery('foo/bar', 'dev')];
expectedConfig.pathsIgnore = ['a', 'b'];
expectedConfig.paths = ['c/d'];
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
const actualConfig = await configUtils.loadConfig();
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
});
ava_1.default("load invalid input - top level entries", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// Replace the arrays with strings or numbers.
// The invalid parts should be ignored and left as the default values.
const inputFileContents = `
name: my config
disable-default-queries: true
queries: foo
paths-ignore: bar
paths: 123`;
// And the config we expect it to parse to
const expectedConfig = new configUtils.Config();
expectedConfig.name = 'my config';
expectedConfig.disableDefaultQueries = true;
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
const actualConfig = await configUtils.loadConfig();
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
});
ava_1.default("load invalid input - queries field type", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// Invalid contents of the "queries" array.
// The invalid parts should be ignored and left as the default values.
const inputFileContents = `
name: my config
disable-default-queries: true
queries:
- name: foo
uses:
- hello: world
- name: bar
uses: github/bar@master`;
// And the config we expect it to parse to
const expectedConfig = new configUtils.Config();
expectedConfig.name = 'my config';
expectedConfig.disableDefaultQueries = true;
expectedConfig.externalQueries.push(new configUtils.ExternalQuery("github/bar", "master"));
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
const actualConfig = await configUtils.loadConfig();
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
});
// Various "uses" fields, and the errors they should produce
const testInputs = {
"''": configUtils.getQueryUsesBlank(),
"foo/bar": configUtils.getQueryUsesIncorrect("foo/bar"),
"foo/bar@v1@v2": configUtils.getQueryUsesIncorrect("foo/bar@v1@v2"),
"foo@master": configUtils.getQueryUsesIncorrect("foo@master"),
"https://github.com/foo/bar@master": configUtils.getQueryUsesIncorrect("https://github.com/foo/bar@master")
};
for (const [input, result] of Object.entries(testInputs)) {
ava_1.default("load invalid input - queries uses \"" + input + "\"", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// Invalid contents of a "queries.uses" field.
// Should fail with the expected error message
const inputFileContents = `
name: my config
queries:
- name: foo
uses: ` + input;
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
try {
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(result));
}
});
});
}
//# sourceMappingURL=config-utils.test.js.map

File diff suppressed because one or more lines are too long

lib/finalize-db.js (generated)

@@ -11,30 +11,12 @@ const core = __importStar(require("@actions/core"));
 const exec = __importStar(require("@actions/exec"));
 const io = __importStar(require("@actions/io"));
 const fs = __importStar(require("fs"));
-const os = __importStar(require("os"));
 const path = __importStar(require("path"));
 const configUtils = __importStar(require("./config-utils"));
 const externalQueries = __importStar(require("./external-queries"));
 const sharedEnv = __importStar(require("./shared-environment"));
 const upload_lib = __importStar(require("./upload-lib"));
 const util = __importStar(require("./util"));
-function getMemoryFlag() {
-    let memoryToUseMegaBytes;
-    const memoryToUseString = core.getInput("ram");
-    if (memoryToUseString) {
-        memoryToUseMegaBytes = Number(memoryToUseString);
-        if (Number.isNaN(memoryToUseMegaBytes) || memoryToUseMegaBytes <= 0) {
-            throw new Error("Invalid RAM setting \"" + memoryToUseString + "\", specified.");
-        }
-    }
-    else {
-        const totalMemoryBytes = os.totalmem();
-        const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
-        const systemReservedMemoryMegaBytes = 256;
-        memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
-    }
-    return "--ram=" + Math.floor(memoryToUseMegaBytes);
-}
 async function createdDBForScannedLanguages(codeqlCmd, databaseFolder) {
     const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
     if (scannedLanguages) {
@@ -115,7 +97,6 @@ async function runQueries(codeqlCmd, databaseFolder, sarifFolder, config) {
         await exec.exec(codeqlCmd, [
             'database',
             'analyze',
-            getMemoryFlag(),
             path.join(databaseFolder, database),
             '--format=sarif-latest',
             '--output=' + sarifFile,

File diff suppressed because one or more lines are too long

lib/upload-lib.js (generated)

@@ -13,6 +13,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(require("@actions/core"));
 const http = __importStar(require("@actions/http-client"));
 const auth = __importStar(require("@actions/http-client/auth"));
+const io = __importStar(require("@actions/io"));
 const file_url_1 = __importDefault(require("file-url"));
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
@@ -20,6 +21,18 @@ const zlib_1 = __importDefault(require("zlib"));
 const fingerprints = __importStar(require("./fingerprints"));
 const sharedEnv = __importStar(require("./shared-environment"));
 const util = __importStar(require("./util"));
+// Construct the location of the sentinel file for detecting multiple uploads.
+// The returned location should be writable.
+async function getSentinelFilePath() {
+    // Use the temp dir instead of placing next to the sarif file because of
+    // issues with docker actions. The directory containing the sarif file
+    // may not be writable by us.
+    const uploadsTmpDir = path.join(process.env['RUNNER_TEMP'] || '/tmp/codeql-action', 'uploads');
+    await io.mkdirP(uploadsTmpDir);
+    // Hash the absolute path so we'll behave correctly in the unlikely
+    // scenario a file is referenced twice with different paths.
+    return path.join(uploadsTmpDir, 'codeql-action-upload-sentinel');
+}
 // Takes a list of paths to sarif files and combines them together,
 // returning the contents of the combined sarif file.
 function combineSarifFiles(sarifFiles) {
@@ -112,27 +125,20 @@ async function upload(input) {
     }
 }
 exports.upload = upload;
-// Counts the number of results in the given SARIF file
-function countResultsInSarif(sarif) {
-    let numResults = 0;
-    for (const run of JSON.parse(sarif).runs) {
-        numResults += run.results.length;
-    }
-    return numResults;
-}
-exports.countResultsInSarif = countResultsInSarif;
 // Uploads the given set of sarif files.
 // Returns true iff the upload occurred and succeeded
 async function uploadFiles(sarifFiles) {
     core.startGroup("Uploading results");
     let succeeded = false;
     try {
-        const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
-        if (process.env[sentinelEnvVar]) {
-            core.error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
+        // Check if an upload has happened before. If so then abort.
+        // This is intended to catch when the finish and upload-sarif actions
+        // are used together, and then the upload-sarif action is invoked twice.
+        const sentinelFile = await getSentinelFilePath();
+        if (fs.existsSync(sentinelFile)) {
+            core.info("Aborting as an upload has already happened from this job");
             return false;
         }
-        core.exportVariable(sentinelEnvVar, sentinelEnvVar);
         const commitOid = util.getRequiredEnvParam('GITHUB_SHA');
         const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
         const ref = util.getRef();
@@ -167,12 +173,10 @@ async function uploadFiles(sarifFiles) {
             "started_at": startedAt,
             "tool_names": toolNames,
         });
-        // Log some useful debug info about the info
-        core.debug("Raw upload size: " + sarifPayload.length + " bytes");
-        core.debug("Base64 zipped upload size: " + zipped_sarif.length + " bytes");
-        core.debug("Number of results in upload: " + countResultsInSarif(sarifPayload));
         // Make the upload
         succeeded = await uploadPayload(payload);
+        // Mark that we have made an upload
+        fs.writeFileSync(sentinelFile, '');
     }
     catch (error) {
         core.setFailed(error.message);

File diff suppressed because one or more lines are too long

lib/util.js (generated)

@@ -337,9 +337,8 @@ exports.getToolNames = getToolNames;
 // Mostly intended for use within tests.
 async function withTmpDir(body) {
     const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
-    const result = await body(tmpDir);
+    await body(tmpDir);
     fs.rmdirSync(tmpDir, { recursive: true });
-    return result;
 }
 exports.withTmpDir = withTmpDir;
 //# sourceMappingURL=util.js.map

File diff suppressed because one or more lines are too long


@@ -5,7 +5,7 @@
"description": "CodeQL action", "description": "CodeQL action",
"scripts": { "scripts": {
"build": "tsc", "build": "tsc",
"test": "ava src/** --serial", "test": "ava src/**",
"lint": "tslint -p . -c tslint.json 'src/**/*.ts'", "lint": "tslint -p . -c tslint.json 'src/**/*.ts'",
"removeNPMAbsolutePaths": "removeNPMAbsolutePaths . --force" "removeNPMAbsolutePaths": "removeNPMAbsolutePaths . --force"
}, },


@@ -1,253 +0,0 @@
import test from 'ava';
import * as fs from 'fs';
import * as path from 'path';
import * as configUtils from './config-utils';
import * as util from './util';
function setInput(name: string, value: string | undefined) {
// Transformation copied from
// https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
if (value !== undefined) {
process.env[envVar] = value;
} else {
delete process.env[envVar];
}
}
test("load empty config", async t => {
return await util.withTmpDir(async tmpDir => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', undefined);
const config = await configUtils.loadConfig();
t.deepEqual(config, new configUtils.Config());
});
});
test("loading config saves config", async t => {
return await util.withTmpDir(async tmpDir => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const configFile = configUtils.getConfigFile();
// Sanity check the saved config file does not already exist
t.false(fs.existsSync(configFile));
const config = await configUtils.loadConfig();
// The saved config file should now exist
t.true(fs.existsSync(configFile));
// And the contents should parse correctly to the config that was returned
t.deepEqual(fs.readFileSync(configFile, 'utf8'), JSON.stringify(config));
});
});
test("load input outside of workspace", async t => {
return await util.withTmpDir(async tmpDir => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', '../input');
try {
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
} catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
}
});
});
test("load non-existent input", async t => {
return await util.withTmpDir(async tmpDir => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
t.false(fs.existsSync(path.join(tmpDir, 'input')));
setInput('config-file', 'input');
try {
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
} catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
}
});
});
test("load non-empty input", async t => {
return await util.withTmpDir(async tmpDir => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// Just create a generic config object with non-default values for all fields
const inputFileContents = `
name: my config
disable-default-queries: true
queries:
- uses: ./foo
- uses: foo/bar@dev
paths-ignore:
- a
- b
paths:
- c/d`;
// And the config we expect it to parse to
const expectedConfig = new configUtils.Config();
expectedConfig.name = 'my config';
expectedConfig.disableDefaultQueries = true;
expectedConfig.additionalQueries.push('foo');
expectedConfig.externalQueries = [new configUtils.ExternalQuery('foo/bar', 'dev')];
expectedConfig.pathsIgnore = ['a', 'b'];
expectedConfig.paths = ['c/d'];
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
const actualConfig = await configUtils.loadConfig();
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
});
test("load partially invalid input", async t => {
return await util.withTmpDir(async tmpDir => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// The valid parts of this config should be parsed correctly.
// The invalid parts should be ignored and left as the default values.
const inputFileContents = `
name:
- foo: bar
disable-default-queries: 42
queries:
- name: foo/bar
uses: foo/bar@dev
paths-ignore:
- a
- b
paths:
- c/d`;
// And the config we expect it to parse to
const expectedConfig = new configUtils.Config();
expectedConfig.externalQueries = [new configUtils.ExternalQuery('foo/bar', 'dev')];
expectedConfig.pathsIgnore = ['a', 'b'];
expectedConfig.paths = ['c/d'];
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
const actualConfig = await configUtils.loadConfig();
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
});
test("load invalid input - top level entries", async t => {
return await util.withTmpDir(async tmpDir => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// Replace the arrays with strings or numbers.
// The invalid parts should be ignored and left as the default values.
const inputFileContents = `
name: my config
disable-default-queries: true
queries: foo
paths-ignore: bar
paths: 123`;
// And the config we expect it to parse to
const expectedConfig = new configUtils.Config();
expectedConfig.name = 'my config';
expectedConfig.disableDefaultQueries = true;
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
const actualConfig = await configUtils.loadConfig();
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
});
test("load invalid input - queries field type", async t => {
return await util.withTmpDir(async tmpDir => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// Invalid contents of the "queries" array.
// The invalid parts should be ignored and left as the default values.
const inputFileContents = `
name: my config
disable-default-queries: true
queries:
- name: foo
uses:
- hello: world
- name: bar
uses: github/bar@master`;
// And the config we expect it to parse to
const expectedConfig = new configUtils.Config();
expectedConfig.name = 'my config';
expectedConfig.disableDefaultQueries = true;
expectedConfig.externalQueries.push(new configUtils.ExternalQuery("github/bar", "master"));
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
const actualConfig = await configUtils.loadConfig();
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
});
// Various "uses" fields, and the errors they should produce
const testInputs = {
"''": configUtils.getQueryUsesBlank(),
"foo/bar": configUtils.getQueryUsesIncorrect("foo/bar"),
"foo/bar@v1@v2": configUtils.getQueryUsesIncorrect("foo/bar@v1@v2"),
"foo@master": configUtils.getQueryUsesIncorrect("foo@master"),
"https://github.com/foo/bar@master": configUtils.getQueryUsesIncorrect("https://github.com/foo/bar@master")
};
for (const [input, result] of Object.entries(testInputs)) {
test("load invalid input - queries uses \"" + input + "\"", async t => {
return await util.withTmpDir(async tmpDir => {
process.env['RUNNER_WORKSPACE'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// Invalid contents of a "queries.uses" field.
// Should fail with the expected error message
const inputFileContents = `
name: my config
queries:
- name: foo
uses: ` + input;
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
try {
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
} catch (err) {
t.deepEqual(err, new Error(result));
}
});
});
}


@@ -4,8 +4,6 @@ import * as fs from 'fs';
 import * as yaml from 'js-yaml';
 import * as path from 'path';
-import * as util from './util';
 export class ExternalQuery {
     public repository: string;
     public ref: string;
@@ -28,12 +26,11 @@ export class Config {
     public addQuery(queryUses: string) {
         // The logic for parsing the string is based on what actions does for
         // parsing the 'uses' actions in the workflow file
-        queryUses = queryUses.trim();
         if (queryUses === "") {
-            throw new Error(getQueryUsesBlank());
+            throw '"uses" value for queries cannot be blank';
         }
-        // Check for the local path case before we start trying to parse the repository name
         if (queryUses.startsWith("./")) {
             this.additionalQueries.push(queryUses.slice(2));
             return;
@@ -41,7 +38,7 @@ export class Config {
         let tok = queryUses.split('@');
         if (tok.length !== 2) {
-            throw new Error(getQueryUsesIncorrect(queryUses));
+            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
         }
         const ref = tok[1];
@@ -49,16 +46,12 @@ export class Config {
         // The first token is the owner
         // The second token is the repo
         // The rest is a path, if there is more than one token combine them to form the full path
-        if (tok.length < 2) {
-            throw new Error(getQueryUsesIncorrect(queryUses));
-        }
         if (tok.length > 3) {
             tok = [tok[0], tok[1], tok.slice(2).join('/')];
         }
-        // Check none of the parts of the repository name are empty
-        if (tok[0].trim() === '' || tok[1].trim() === '') {
-            throw new Error(getQueryUsesIncorrect(queryUses));
+        if (tok.length < 2) {
+            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
         }
         let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
@@ -69,24 +62,10 @@ export class Config {
     }
 }
-export function getQueryUsesBlank(): string {
-    return '"uses" value for queries cannot be blank';
-}
-export function getQueryUsesIncorrect(queryUses: string): string {
-    return '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
-}
-export function getConfigFileOutsideWorkspaceErrorMessage(configFile: string): string {
-    return 'The configuration file "' + configFile + '" is outside of the workspace';
-}
-export function getConfigFileDoesNotExistErrorMessage(configFile: string): string {
-    return 'The configuration file "' + configFile + '" does not exist';
-}
+const configFolder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
 function initConfig(): Config {
-    let configFile = core.getInput('config-file');
+    const configFile = core.getInput('config-file');
     const config = new Config();
@@ -96,20 +75,7 @@ function initConfig(): Config {
         return config;
     }
-    // Treat the config file as relative to the workspace
-    const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
-    configFile = path.resolve(workspacePath, configFile);
-    // Error if the config file is now outside of the workspace
-    if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
-        throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
-    }
-    // Error if the file does not exist
-    if (!fs.existsSync(configFile)) {
-        throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
-    }
+    try {
     const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
     if (parsedYAML.name && typeof parsedYAML.name === "string") {
@@ -123,7 +89,7 @@ function initConfig(): Config {
     const queries = parsedYAML.queries;
     if (queries && queries instanceof Array) {
         queries.forEach(query => {
-            if (typeof query.uses === "string") {
+            if (query.uses && typeof query.uses === "string") {
                 config.addQuery(query.uses);
             }
         });
@@ -146,28 +112,23 @@ function initConfig(): Config {
             }
         });
     }
+    } catch (err) {
+        core.setFailed(err);
+    }
     return config;
 }
-function getConfigFolder(): string {
-    return util.getRequiredEnvParam('RUNNER_WORKSPACE');
-}
-export function getConfigFile(): string {
-    return path.join(getConfigFolder(), 'config');
-}
 async function saveConfig(config: Config) {
     const configString = JSON.stringify(config);
-    await io.mkdirP(getConfigFolder());
-    fs.writeFileSync(getConfigFile(), configString, 'utf8');
+    await io.mkdirP(configFolder);
+    fs.writeFileSync(path.join(configFolder, 'config'), configString, 'utf8');
     core.debug('Saved config:');
     core.debug(configString);
 }
 export async function loadConfig(): Promise<Config> {
-    const configFile = getConfigFile();
+    const configFile = path.join(configFolder, 'config');
     if (fs.existsSync(configFile)) {
         const configString = fs.readFileSync(configFile, 'utf8');
         core.debug('Loaded config:');
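
Judging from the addQuery parsing above, the `uses` values a config file may contain take roughly the forms below; a value without exactly one `@`, or with fewer than two `/`-separated parts before the `@`, is rejected with the error message shown in the code. The concrete values here are illustrative.

queries:
  # Local path inside the repository
  - uses: ./queries/my-queries
  # A whole repository at a ref
  - uses: octo-org/custom-queries@main
  # A path within a repository at a ref
  - uses: github/codeql/cpp/ql/src/codeql-suites/cpp-lgtm.qls@master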


@@ -2,7 +2,6 @@ import * as core from '@actions/core';
 import * as exec from '@actions/exec';
 import * as io from '@actions/io';
 import * as fs from 'fs';
-import * as os from 'os';
 import * as path from 'path';
 import * as configUtils from './config-utils';
@@ -11,23 +10,6 @@ import * as sharedEnv from './shared-environment';
 import * as upload_lib from './upload-lib';
 import * as util from './util';
-function getMemoryFlag(): string {
-    let memoryToUseMegaBytes: number;
-    const memoryToUseString = core.getInput("ram");
-    if (memoryToUseString) {
-        memoryToUseMegaBytes = Number(memoryToUseString);
-        if (Number.isNaN(memoryToUseMegaBytes) || memoryToUseMegaBytes <= 0) {
-            throw new Error("Invalid RAM setting \"" + memoryToUseString + "\", specified.");
-        }
-    } else {
-        const totalMemoryBytes = os.totalmem();
-        const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
-        const systemReservedMemoryMegaBytes = 256;
-        memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
-    }
-    return "--ram=" + Math.floor(memoryToUseMegaBytes);
-}
 async function createdDBForScannedLanguages(codeqlCmd: string, databaseFolder: string) {
     const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
     if (scannedLanguages) {
@@ -131,7 +113,6 @@ async function runQueries(codeqlCmd: string, databaseFolder: string, sarifFolder
         await exec.exec(codeqlCmd, [
             'database',
             'analyze',
-            getMemoryFlag(),
             path.join(databaseFolder, database),
             '--format=sarif-latest',
             '--output=' + sarifFile,
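
As a worked example of the removed getMemoryFlag logic: with no `ram` input on a runner reporting 7168 MB of total memory, the function would have returned `--ram=6912` (7168 minus the 256 MB reserved for the system), while an explicit `ram: 4096` input would have produced `--ram=4096`.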


@@ -1,6 +1,7 @@
 import * as core from '@actions/core';
 import * as http from '@actions/http-client';
 import * as auth from '@actions/http-client/auth';
+import * as io from '@actions/io';
 import fileUrl from 'file-url';
 import * as fs from 'fs';
 import * as path from 'path';
@@ -10,6 +11,19 @@ import * as fingerprints from './fingerprints';
 import * as sharedEnv from './shared-environment';
 import * as util from './util';
+// Construct the location of the sentinel file for detecting multiple uploads.
+// The returned location should be writable.
+async function getSentinelFilePath(): Promise<string> {
+    // Use the temp dir instead of placing next to the sarif file because of
+    // issues with docker actions. The directory containing the sarif file
+    // may not be writable by us.
+    const uploadsTmpDir = path.join(process.env['RUNNER_TEMP'] || '/tmp/codeql-action', 'uploads');
+    await io.mkdirP(uploadsTmpDir);
+    // Hash the absolute path so we'll behave correctly in the unlikely
+    // scenario a file is referenced twice with different paths.
+    return path.join(uploadsTmpDir, 'codeql-action-upload-sentinel');
+}
 // Takes a list of paths to sarif files and combines them together,
 // returning the contents of the combined sarif file.
 export function combineSarifFiles(sarifFiles: string[]): string {
@@ -114,27 +128,20 @@ export async function upload(input: string): Promise<boolean> {
     }
 }
-// Counts the number of results in the given SARIF file
-export function countResultsInSarif(sarif: string): number {
-    let numResults = 0;
-    for (const run of JSON.parse(sarif).runs) {
-        numResults += run.results.length;
-    }
-    return numResults;
-}
 // Uploads the given set of sarif files.
 // Returns true iff the upload occurred and succeeded
 async function uploadFiles(sarifFiles: string[]): Promise<boolean> {
     core.startGroup("Uploading results");
     let succeeded = false;
     try {
-        const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
-        if (process.env[sentinelEnvVar]) {
-            core.error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
+        // Check if an upload has happened before. If so then abort.
+        // This is intended to catch when the finish and upload-sarif actions
+        // are used together, and then the upload-sarif action is invoked twice.
+        const sentinelFile = await getSentinelFilePath();
+        if (fs.existsSync(sentinelFile)) {
+            core.info("Aborting as an upload has already happened from this job");
             return false;
         }
-        core.exportVariable(sentinelEnvVar, sentinelEnvVar);
         const commitOid = util.getRequiredEnvParam('GITHUB_SHA');
         const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
@@ -177,14 +184,12 @@ async function uploadFiles(sarifFiles: string[]): Promise<boolean> {
             "tool_names": toolNames,
         });
-        // Log some useful debug info about the info
-        core.debug("Raw upload size: " + sarifPayload.length + " bytes");
-        core.debug("Base64 zipped upload size: " + zipped_sarif.length + " bytes");
-        core.debug("Number of results in upload: " + countResultsInSarif(sarifPayload));
         // Make the upload
         succeeded = await uploadPayload(payload);
+        // Mark that we have made an upload
+        fs.writeFileSync(sentinelFile, '');
     } catch (error) {
         core.setFailed(error.message);
     }


@@ -377,9 +377,8 @@ export function getToolNames(sarifContents: string): string[] {
 // Creates a random temporary directory, runs the given body, and then deletes the directory.
 // Mostly intended for use within tests.
-export async function withTmpDir<T>(body: (tmpDir: string) => Promise<T>): Promise<T> {
+export async function withTmpDir(body: (tmpDir: string) => Promise<void>) {
     const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
-    const result = await body(tmpDir);
+    await body(tmpDir);
     fs.rmdirSync(tmpDir, { recursive: true });
-    return result;
 }


@@ -3,27 +3,15 @@ name: Use custom queries
 disable-default-queries: true
 queries:
-  # Query suites
-  - name: Select a query suite
-    uses: ./codeql-qlpacks/complex-python-qlpack/rootAndBar.qls
-  # QL pack subset
-  - name: Select a ql file
-    uses: ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
-  - name: Select a subfolder
-    uses: ./codeql-qlpacks/complex-javascript-qlpack/foo
-  - name: Select a folder with two subfolders
-    uses: ./codeql-qlpacks/complex-javascript-qlpack/foo2
-  # Inrepo QL pack
-  - name: Select an inrepo ql pack
-    uses: ./codeql-qlpacks/csharp-qlpack
-  - name: Java queries
-    uses: ./codeql-qlpacks/java-qlpack
-  # External QL packs
-  - name: Go queries
-    uses: Anthophila/go-querypack@master
-  - name: Cpp queries
-    uses: Anthophila/cpp-querypack@second-branch
-  - name: Javascript queries
-    uses: Anthophila/javascript-querypack/show_ifs2.ql@master
-  - name: Python queries
-    uses: Anthophila/python-querypack/show_ifs2.ql@second-branch
+  - name: Additional C++ queries
+    uses: github/codeql/cpp/ql/src/codeql-suites/cpp-lgtm.qls@lgtm.com
+  - name: Additional C# queries
+    uses: github/codeql/csharp/ql/src/codeql-suites/csharp-lgtm.qls@lgtm.com
+  - name: Additional Go queries
+    uses: github/codeql-go/ql/src/codeql-suites/go-lgtm.qls@lgtm.com
+  - name: Additional Java queries
+    uses: github/codeql/java/ql/src/codeql-suites/java-lgtm.qls@lgtm.com
+  - name: Additional Javascript queries
+    uses: github/codeql/javascript/ql/src/codeql-suites/javascript-lgtm.qls@lgtm.com
+  - name: Additional Python queries
+    uses: github/codeql/python/ql/src/codeql-suites/python-lgtm.qls@lgtm.com