Mirror of https://github.com/github/codeql-action.git, synced 2025-12-21 06:40:20 +08:00
Compare commits: testInputs...examples-t (2 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 807be914ae | |
| | 6a82723ea1 | |
1 .github/workflows/codeql.yml vendored

@@ -13,5 +13,6 @@ jobs:
      - uses: actions/checkout@v1
      - uses: ./init
        with:
          languages: javascript
          config-file: ./.github/codeql/codeql-config.yml
      - uses: ./analyze
@@ -84,7 +84,7 @@ The CodeQL action should be run on `push` events, and on a `schedule`. `Push` ev

### Configuration

You may optionally specify additional queries for CodeQL to execute by using a config file. The queries must belong to a [QL pack](https://help.semmle.com/codeql/codeql-cli/reference/qlpack-overview.html) and can be in your repository or any public repository. You can choose a single .ql file, a folder containing multiple .ql files, a .qls [query suite](https://help.semmle.com/codeql/codeql-cli/procedures/query-suites.html) file, or any combination of the above. To use queries stored in your repository or from other repositories use the same syntax as when [using an action](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepsuses). Note that when using local queries starting with `./`, the path is relative to the root of the repository and not to the location of the config file.
You may optionally specify additional queries for CodeQL to execute by using a config file. The queries must belong to a [QL pack](https://help.semmle.com/codeql/codeql-cli/reference/qlpack-overview.html) and can be in your repository or any public repository. You can choose a single .ql file, a folder containing multiple .ql files, a .qls [query suite](https://help.semmle.com/codeql/codeql-cli/procedures/query-suites.html) file, or any combination of the above. To use queries from other repositories use the same syntax as when [using an action](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepsuses).

You can disable the default queries using `disable-default-queries: true`.
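For orientation while reading this diff, a minimal config file using the options described above might look like the following sketch. The file path mirrors the workflow change earlier in this comparison; the query names and the local `./my-queries` path are illustrative only, loosely based on the example files added later in this diff.

```yaml
# .github/codeql/codeql-config.yml -- illustrative sketch, not part of this diff
name: My CodeQL config
disable-default-queries: true
queries:
  # Local queries/packs are resolved relative to the repository root
  - name: In-repo queries
    uses: ./my-queries
  # Queries in other repositories use the owner/repo(/path)@ref syntax
  - name: Additional JavaScript queries
    uses: github/codeql/javascript/ql/src/codeql-suites/javascript-lgtm.qls@master
```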
@@ -12,9 +12,6 @@ inputs:
    description: Upload the SARIF file
    required: false
    default: true
  ram:
    description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
    required: false
  token:
    default: ${{ github.token }}
  matrix:
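As a sketch of how the inputs above would be consumed, a workflow step might pass them to the local analyze action as shown below. The values are examples only, and since this hunk drops the `ram` input on one side of the comparison, treat its availability as an assumption about the version you are using.

```yaml
# Illustrative only -- a step passing inputs to the local analyze action
- uses: ./analyze
  with:
    upload: true   # upload the SARIF file (defaults to true)
    ram: 4096      # override CodeQL memory use (MB) instead of using almost all machine memory
```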
4 examples/extended-cpp-queryset.yml Normal file

@@ -0,0 +1,4 @@
name: extended-cpp-queryset
queries:
  - name: Additional C++ queries
    uses: github/codeql/cpp/ql/src/codeql-suites/cpp-lgtm.qls@master

4 examples/extended-csharp-queryset.yml Normal file

@@ -0,0 +1,4 @@
name: extended-csharp-queryset
queries:
  - name: Additional C# queries
    uses: github/codeql/csharp/ql/src/codeql-suites/csharp-lgtm.qls@master

4 examples/extended-go-queryset.yml Normal file

@@ -0,0 +1,4 @@
name: extended-go-queryset
queries:
  - name: Additional Go queries
    uses: github/codeql-go/ql/src/codeql-suites/go-lgtm.qls@master

4 examples/extended-java-queryset.yml Normal file

@@ -0,0 +1,4 @@
name: extended-java-queryset
queries:
  - name: Additional Java queries
    uses: github/codeql/java/ql/src/codeql-suites/java-lgtm.qls@master

4 examples/extended-javascript-queryset.yml Normal file

@@ -0,0 +1,4 @@
name: extended-javascript-queryset
queries:
  - name: Additional Javascript queries
    uses: github/codeql/javascript/ql/src/codeql-suites/javascript-lgtm.qls@master

4 examples/extended-python-queryset.yml Normal file

@@ -0,0 +1,4 @@
name: extended-python-queryset
queries:
  - name: Additional Python queries
    uses: github/codeql/python/ql/src/codeql-suites/python-lgtm.qls@master
@@ -8,7 +8,7 @@ inputs:
    default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200427/codeql-bundle.tar.gz
  languages:
    description: The languages to be analysed
    required: true
    required: false
  token:
    default: ${{ github.token }}
  config-file:
65 lib/config-utils.js generated

@@ -12,7 +12,6 @@ const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const yaml = __importStar(require("js-yaml"));
const path = __importStar(require("path"));
const util = __importStar(require("./util"));
class ExternalQuery {
    constructor(repository, ref) {
        this.path = '';
@@ -33,33 +32,27 @@ class Config {
    addQuery(queryUses) {
        // The logic for parsing the string is based on what actions does for
        // parsing the 'uses' actions in the workflow file
        queryUses = queryUses.trim();
        if (queryUses === "") {
            throw new Error(getQueryUsesBlank());
            throw '"uses" value for queries cannot be blank';
        }
        // Check for the local path case before we start trying to parse the repository name
        if (queryUses.startsWith("./")) {
            this.additionalQueries.push(queryUses.slice(2));
            return;
        }
        let tok = queryUses.split('@');
        if (tok.length !== 2) {
            throw new Error(getQueryUsesIncorrect(queryUses));
            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
        }
        const ref = tok[1];
        tok = tok[0].split('/');
        // The first token is the owner
        // The second token is the repo
        // The rest is a path, if there is more than one token combine them to form the full path
        if (tok.length < 2) {
            throw new Error(getQueryUsesIncorrect(queryUses));
        }
        if (tok.length > 3) {
            tok = [tok[0], tok[1], tok.slice(2).join('/')];
        }
        // Check none of the parts of the repository name are empty
        if (tok[0].trim() === '' || tok[1].trim() === '') {
            throw new Error(getQueryUsesIncorrect(queryUses));
        if (tok.length < 2) {
            throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
        }
        let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
        if (tok.length === 3) {
@@ -69,41 +62,16 @@ class Config {
    }
}
exports.Config = Config;
function getQueryUsesBlank() {
    return '"uses" value for queries cannot be blank';
}
exports.getQueryUsesBlank = getQueryUsesBlank;
function getQueryUsesIncorrect(queryUses) {
    return '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
}
exports.getQueryUsesIncorrect = getQueryUsesIncorrect;
function getConfigFileOutsideWorkspaceErrorMessage(configFile) {
    return 'The configuration file "' + configFile + '" is outside of the workspace';
}
exports.getConfigFileOutsideWorkspaceErrorMessage = getConfigFileOutsideWorkspaceErrorMessage;
function getConfigFileDoesNotExistErrorMessage(configFile) {
    return 'The configuration file "' + configFile + '" does not exist';
}
exports.getConfigFileDoesNotExistErrorMessage = getConfigFileDoesNotExistErrorMessage;
const configFolder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
function initConfig() {
    let configFile = core.getInput('config-file');
    const configFile = core.getInput('config-file');
    const config = new Config();
    // If no config file was provided create an empty one
    if (configFile === '') {
        core.debug('No configuration file was provided');
        return config;
    }
    // Treat the config file as relative to the workspace
    const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
    configFile = path.resolve(workspacePath, configFile);
    // Error if the config file is now outside of the workspace
    if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
        throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
    }
    // Error if the file does not exist
    if (!fs.existsSync(configFile)) {
        throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
    }
    try {
        const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
        if (parsedYAML.name && typeof parsedYAML.name === "string") {
            config.name = parsedYAML.name;
@@ -114,7 +82,7 @@ function initConfig() {
        const queries = parsedYAML.queries;
        if (queries && queries instanceof Array) {
            queries.forEach(query => {
                if (typeof query.uses === "string") {
                if (query.uses && typeof query.uses === "string") {
                    config.addQuery(query.uses);
                }
            });
@@ -135,24 +103,21 @@ function initConfig() {
            }
        });
    }
    }
    catch (err) {
        core.setFailed(err);
    }
    return config;
}
function getConfigFolder() {
    return util.getRequiredEnvParam('RUNNER_WORKSPACE');
}
function getConfigFile() {
    return path.join(getConfigFolder(), 'config');
}
exports.getConfigFile = getConfigFile;
async function saveConfig(config) {
    const configString = JSON.stringify(config);
    await io.mkdirP(getConfigFolder());
    fs.writeFileSync(getConfigFile(), configString, 'utf8');
    await io.mkdirP(configFolder);
    fs.writeFileSync(path.join(configFolder, 'config'), configString, 'utf8');
    core.debug('Saved config:');
    core.debug(configString);
}
async function loadConfig() {
    const configFile = getConfigFile();
    const configFile = path.join(configFolder, 'config');
    if (fs.existsSync(configFile)) {
        const configString = fs.readFileSync(configFile, 'utf8');
        core.debug('Loaded config:');
File diff suppressed because one or more lines are too long
224 lib/config-utils.test.js generated

@@ -1,224 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const configUtils = __importStar(require("./config-utils"));
const util = __importStar(require("./util"));
function setInput(name, value) {
    // Transformation copied from
    // https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
    const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
    if (value !== undefined) {
        process.env[envVar] = value;
    }
    else {
        delete process.env[envVar];
    }
}
ava_1.default("load empty config", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_WORKSPACE'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        setInput('config-file', undefined);
        const config = await configUtils.loadConfig();
        t.deepEqual(config, new configUtils.Config());
    });
});
ava_1.default("loading config saves config", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_WORKSPACE'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        const configFile = configUtils.getConfigFile();
        // Sanity check the saved config file does not already exist
        t.false(fs.existsSync(configFile));
        const config = await configUtils.loadConfig();
        // The saved config file should now exist
        t.true(fs.existsSync(configFile));
        // And the contents should parse correctly to the config that was returned
        t.deepEqual(fs.readFileSync(configFile, 'utf8'), JSON.stringify(config));
    });
});
ava_1.default("load input outside of workspace", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_WORKSPACE'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        setInput('config-file', '../input');
        try {
            await configUtils.loadConfig();
            throw new Error('loadConfig did not throw error');
        }
        catch (err) {
            t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
        }
    });
});
ava_1.default("load non-existent input", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_WORKSPACE'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        t.false(fs.existsSync(path.join(tmpDir, 'input')));
        setInput('config-file', 'input');
        try {
            await configUtils.loadConfig();
            throw new Error('loadConfig did not throw error');
        }
        catch (err) {
            t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
        }
    });
});
ava_1.default("load non-empty input", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_WORKSPACE'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        // Just create a generic config object with non-default values for all fields
        const inputFileContents = `
      name: my config
      disable-default-queries: true
      queries:
        - uses: ./foo
        - uses: foo/bar@dev
      paths-ignore:
        - a
        - b
      paths:
        - c/d`;
        // And the config we expect it to parse to
        const expectedConfig = new configUtils.Config();
        expectedConfig.name = 'my config';
        expectedConfig.disableDefaultQueries = true;
        expectedConfig.additionalQueries.push('foo');
        expectedConfig.externalQueries = [new configUtils.ExternalQuery('foo/bar', 'dev')];
        expectedConfig.pathsIgnore = ['a', 'b'];
        expectedConfig.paths = ['c/d'];
        fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
        setInput('config-file', 'input');
        const actualConfig = await configUtils.loadConfig();
        // Should exactly equal the object we constructed earlier
        t.deepEqual(actualConfig, expectedConfig);
    });
});
ava_1.default("load partially invalid input", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_WORKSPACE'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        // The valid parts of this config should be parsed correctly.
        // The invalid parts should be ignored and left as the default values.
        const inputFileContents = `
      name:
        - foo: bar
      disable-default-queries: 42
      queries:
        - name: foo/bar
          uses: foo/bar@dev
      paths-ignore:
        - a
        - b
      paths:
        - c/d`;
        // And the config we expect it to parse to
        const expectedConfig = new configUtils.Config();
        expectedConfig.externalQueries = [new configUtils.ExternalQuery('foo/bar', 'dev')];
        expectedConfig.pathsIgnore = ['a', 'b'];
        expectedConfig.paths = ['c/d'];
        fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
        setInput('config-file', 'input');
        const actualConfig = await configUtils.loadConfig();
        // Should exactly equal the object we constructed earlier
        t.deepEqual(actualConfig, expectedConfig);
    });
});
ava_1.default("load invalid input - top level entries", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_WORKSPACE'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        // Replace the arrays with strings or numbers.
        // The invalid parts should be ignored and left as the default values.
        const inputFileContents = `
      name: my config
      disable-default-queries: true
      queries: foo
      paths-ignore: bar
      paths: 123`;
        // And the config we expect it to parse to
        const expectedConfig = new configUtils.Config();
        expectedConfig.name = 'my config';
        expectedConfig.disableDefaultQueries = true;
        fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
        setInput('config-file', 'input');
        const actualConfig = await configUtils.loadConfig();
        // Should exactly equal the object we constructed earlier
        t.deepEqual(actualConfig, expectedConfig);
    });
});
ava_1.default("load invalid input - queries field type", async (t) => {
    return await util.withTmpDir(async (tmpDir) => {
        process.env['RUNNER_WORKSPACE'] = tmpDir;
        process.env['GITHUB_WORKSPACE'] = tmpDir;
        // Invalid contents of the "queries" array.
        // The invalid parts should be ignored and left as the default values.
        const inputFileContents = `
      name: my config
      disable-default-queries: true
      queries:
        - name: foo
          uses:
            - hello: world
        - name: bar
          uses: github/bar@master`;
        // And the config we expect it to parse to
        const expectedConfig = new configUtils.Config();
        expectedConfig.name = 'my config';
        expectedConfig.disableDefaultQueries = true;
        expectedConfig.externalQueries.push(new configUtils.ExternalQuery("github/bar", "master"));
        fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
        setInput('config-file', 'input');
        const actualConfig = await configUtils.loadConfig();
        // Should exactly equal the object we constructed earlier
        t.deepEqual(actualConfig, expectedConfig);
    });
});
// Various "uses" fields, and the errors they should produce
const testInputs = {
    "''": configUtils.getQueryUsesBlank(),
    "foo/bar": configUtils.getQueryUsesIncorrect("foo/bar"),
    "foo/bar@v1@v2": configUtils.getQueryUsesIncorrect("foo/bar@v1@v2"),
    "foo@master": configUtils.getQueryUsesIncorrect("foo@master"),
    "https://github.com/foo/bar@master": configUtils.getQueryUsesIncorrect("https://github.com/foo/bar@master")
};
for (const [input, result] of Object.entries(testInputs)) {
    ava_1.default("load invalid input - queries uses \"" + input + "\"", async (t) => {
        return await util.withTmpDir(async (tmpDir) => {
            process.env['RUNNER_WORKSPACE'] = tmpDir;
            process.env['GITHUB_WORKSPACE'] = tmpDir;
            // Invalid contents of a "queries.uses" field.
            // Should fail with the expected error message
            const inputFileContents = `
      name: my config
      queries:
        - name: foo
          uses: ` + input;
            fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
            setInput('config-file', 'input');
            try {
                await configUtils.loadConfig();
                throw new Error('loadConfig did not throw error');
            }
            catch (err) {
                t.deepEqual(err, new Error(result));
            }
        });
    });
}
//# sourceMappingURL=config-utils.test.js.map
File diff suppressed because one or more lines are too long
19 lib/finalize-db.js generated

@@ -11,30 +11,12 @@ const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const configUtils = __importStar(require("./config-utils"));
const externalQueries = __importStar(require("./external-queries"));
const sharedEnv = __importStar(require("./shared-environment"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
function getMemoryFlag() {
    let memoryToUseMegaBytes;
    const memoryToUseString = core.getInput("ram");
    if (memoryToUseString) {
        memoryToUseMegaBytes = Number(memoryToUseString);
        if (Number.isNaN(memoryToUseMegaBytes) || memoryToUseMegaBytes <= 0) {
            throw new Error("Invalid RAM setting \"" + memoryToUseString + "\", specified.");
        }
    }
    else {
        const totalMemoryBytes = os.totalmem();
        const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
        const systemReservedMemoryMegaBytes = 256;
        memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
    }
    return "--ram=" + Math.floor(memoryToUseMegaBytes);
}
async function createdDBForScannedLanguages(codeqlCmd, databaseFolder) {
    const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
    if (scannedLanguages) {
@@ -115,7 +97,6 @@ async function runQueries(codeqlCmd, databaseFolder, sarifFolder, config) {
        await exec.exec(codeqlCmd, [
            'database',
            'analyze',
            getMemoryFlag(),
            path.join(databaseFolder, database),
            '--format=sarif-latest',
            '--output=' + sarifFile,
File diff suppressed because one or more lines are too long
38 lib/upload-lib.js generated

@@ -13,6 +13,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const http = __importStar(require("@actions/http-client"));
const auth = __importStar(require("@actions/http-client/auth"));
const io = __importStar(require("@actions/io"));
const file_url_1 = __importDefault(require("file-url"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
@@ -20,6 +21,18 @@ const zlib_1 = __importDefault(require("zlib"));
const fingerprints = __importStar(require("./fingerprints"));
const sharedEnv = __importStar(require("./shared-environment"));
const util = __importStar(require("./util"));
// Construct the location of the sentinel file for detecting multiple uploads.
// The returned location should be writable.
async function getSentinelFilePath() {
    // Use the temp dir instead of placing next to the sarif file because of
    // issues with docker actions. The directory containing the sarif file
    // may not be writable by us.
    const uploadsTmpDir = path.join(process.env['RUNNER_TEMP'] || '/tmp/codeql-action', 'uploads');
    await io.mkdirP(uploadsTmpDir);
    // Hash the absolute path so we'll behave correctly in the unlikely
    // scenario a file is referenced twice with different paths.
    return path.join(uploadsTmpDir, 'codeql-action-upload-sentinel');
}
// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
function combineSarifFiles(sarifFiles) {
@@ -112,27 +125,20 @@ async function upload(input) {
    }
}
exports.upload = upload;
// Counts the number of results in the given SARIF file
function countResultsInSarif(sarif) {
    let numResults = 0;
    for (const run of JSON.parse(sarif).runs) {
        numResults += run.results.length;
    }
    return numResults;
}
exports.countResultsInSarif = countResultsInSarif;
// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
async function uploadFiles(sarifFiles) {
    core.startGroup("Uploading results");
    let succeeded = false;
    try {
        const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
        if (process.env[sentinelEnvVar]) {
            core.error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
        // Check if an upload has happened before. If so then abort.
        // This is intended to catch when the finish and upload-sarif actions
        // are used together, and then the upload-sarif action is invoked twice.
        const sentinelFile = await getSentinelFilePath();
        if (fs.existsSync(sentinelFile)) {
            core.info("Aborting as an upload has already happened from this job");
            return false;
        }
        core.exportVariable(sentinelEnvVar, sentinelEnvVar);
        const commitOid = util.getRequiredEnvParam('GITHUB_SHA');
        const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
        const ref = util.getRef();
@@ -167,12 +173,10 @@ async function uploadFiles(sarifFiles) {
            "started_at": startedAt,
            "tool_names": toolNames,
        });
        // Log some useful debug info about the info
        core.debug("Raw upload size: " + sarifPayload.length + " bytes");
        core.debug("Base64 zipped upload size: " + zipped_sarif.length + " bytes");
        core.debug("Number of results in upload: " + countResultsInSarif(sarifPayload));
        // Make the upload
        succeeded = await uploadPayload(payload);
        // Mark that we have made an upload
        fs.writeFileSync(sentinelFile, '');
    }
    catch (error) {
        core.setFailed(error.message);
File diff suppressed because one or more lines are too long
3 lib/util.js generated

@@ -337,9 +337,8 @@ exports.getToolNames = getToolNames;
// Mostly intended for use within tests.
async function withTmpDir(body) {
    const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
    const result = await body(tmpDir);
    await body(tmpDir);
    fs.rmdirSync(tmpDir, { recursive: true });
    return result;
}
exports.withTmpDir = withTmpDir;
//# sourceMappingURL=util.js.map
File diff suppressed because one or more lines are too long
@@ -5,7 +5,7 @@
  "description": "CodeQL action",
  "scripts": {
    "build": "tsc",
    "test": "ava src/** --serial",
    "test": "ava src/**",
    "lint": "tslint -p . -c tslint.json 'src/**/*.ts'",
    "removeNPMAbsolutePaths": "removeNPMAbsolutePaths . --force"
  },
@@ -1,253 +0,0 @@
import test from 'ava';
import * as fs from 'fs';
import * as path from 'path';

import * as configUtils from './config-utils';
import * as util from './util';

function setInput(name: string, value: string | undefined) {
  // Transformation copied from
  // https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
  const envVar = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
  if (value !== undefined) {
    process.env[envVar] = value;
  } else {
    delete process.env[envVar];
  }
}

test("load empty config", async t => {
  return await util.withTmpDir(async tmpDir => {
    process.env['RUNNER_WORKSPACE'] = tmpDir;
    process.env['GITHUB_WORKSPACE'] = tmpDir;

    setInput('config-file', undefined);

    const config = await configUtils.loadConfig();

    t.deepEqual(config, new configUtils.Config());
  });
});

test("loading config saves config", async t => {
  return await util.withTmpDir(async tmpDir => {
    process.env['RUNNER_WORKSPACE'] = tmpDir;
    process.env['GITHUB_WORKSPACE'] = tmpDir;

    const configFile = configUtils.getConfigFile();
    // Sanity check the saved config file does not already exist
    t.false(fs.existsSync(configFile));

    const config = await configUtils.loadConfig();

    // The saved config file should now exist
    t.true(fs.existsSync(configFile));

    // And the contents should parse correctly to the config that was returned
    t.deepEqual(fs.readFileSync(configFile, 'utf8'), JSON.stringify(config));
  });
});

test("load input outside of workspace", async t => {
  return await util.withTmpDir(async tmpDir => {
    process.env['RUNNER_WORKSPACE'] = tmpDir;
    process.env['GITHUB_WORKSPACE'] = tmpDir;

    setInput('config-file', '../input');

    try {
      await configUtils.loadConfig();
      throw new Error('loadConfig did not throw error');
    } catch (err) {
      t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
    }
  });
});

test("load non-existent input", async t => {
  return await util.withTmpDir(async tmpDir => {
    process.env['RUNNER_WORKSPACE'] = tmpDir;
    process.env['GITHUB_WORKSPACE'] = tmpDir;

    t.false(fs.existsSync(path.join(tmpDir, 'input')));
    setInput('config-file', 'input');

    try {
      await configUtils.loadConfig();
      throw new Error('loadConfig did not throw error');
    } catch (err) {
      t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
    }
  });
});

test("load non-empty input", async t => {
  return await util.withTmpDir(async tmpDir => {
    process.env['RUNNER_WORKSPACE'] = tmpDir;
    process.env['GITHUB_WORKSPACE'] = tmpDir;

    // Just create a generic config object with non-default values for all fields
    const inputFileContents = `
      name: my config
      disable-default-queries: true
      queries:
        - uses: ./foo
        - uses: foo/bar@dev
      paths-ignore:
        - a
        - b
      paths:
        - c/d`;

    // And the config we expect it to parse to
    const expectedConfig = new configUtils.Config();
    expectedConfig.name = 'my config';
    expectedConfig.disableDefaultQueries = true;
    expectedConfig.additionalQueries.push('foo');
    expectedConfig.externalQueries = [new configUtils.ExternalQuery('foo/bar', 'dev')];
    expectedConfig.pathsIgnore = ['a', 'b'];
    expectedConfig.paths = ['c/d'];

    fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
    setInput('config-file', 'input');

    const actualConfig = await configUtils.loadConfig();

    // Should exactly equal the object we constructed earlier
    t.deepEqual(actualConfig, expectedConfig);
  });
});

test("load partially invalid input", async t => {
  return await util.withTmpDir(async tmpDir => {
    process.env['RUNNER_WORKSPACE'] = tmpDir;
    process.env['GITHUB_WORKSPACE'] = tmpDir;

    // The valid parts of this config should be parsed correctly.
    // The invalid parts should be ignored and left as the default values.
    const inputFileContents = `
      name:
        - foo: bar
      disable-default-queries: 42
      queries:
        - name: foo/bar
          uses: foo/bar@dev
      paths-ignore:
        - a
        - b
      paths:
        - c/d`;

    // And the config we expect it to parse to
    const expectedConfig = new configUtils.Config();
    expectedConfig.externalQueries = [new configUtils.ExternalQuery('foo/bar', 'dev')];
    expectedConfig.pathsIgnore = ['a', 'b'];
    expectedConfig.paths = ['c/d'];

    fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
    setInput('config-file', 'input');

    const actualConfig = await configUtils.loadConfig();

    // Should exactly equal the object we constructed earlier
    t.deepEqual(actualConfig, expectedConfig);
  });
});

test("load invalid input - top level entries", async t => {
  return await util.withTmpDir(async tmpDir => {
    process.env['RUNNER_WORKSPACE'] = tmpDir;
    process.env['GITHUB_WORKSPACE'] = tmpDir;

    // Replace the arrays with strings or numbers.
    // The invalid parts should be ignored and left as the default values.
    const inputFileContents = `
      name: my config
      disable-default-queries: true
      queries: foo
      paths-ignore: bar
      paths: 123`;

    // And the config we expect it to parse to
    const expectedConfig = new configUtils.Config();
    expectedConfig.name = 'my config';
    expectedConfig.disableDefaultQueries = true;

    fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
    setInput('config-file', 'input');

    const actualConfig = await configUtils.loadConfig();

    // Should exactly equal the object we constructed earlier
    t.deepEqual(actualConfig, expectedConfig);
  });
});

test("load invalid input - queries field type", async t => {
  return await util.withTmpDir(async tmpDir => {
    process.env['RUNNER_WORKSPACE'] = tmpDir;
    process.env['GITHUB_WORKSPACE'] = tmpDir;

    // Invalid contents of the "queries" array.
    // The invalid parts should be ignored and left as the default values.
    const inputFileContents = `
      name: my config
      disable-default-queries: true
      queries:
        - name: foo
          uses:
            - hello: world
        - name: bar
          uses: github/bar@master`;

    // And the config we expect it to parse to
    const expectedConfig = new configUtils.Config();
    expectedConfig.name = 'my config';
    expectedConfig.disableDefaultQueries = true;
    expectedConfig.externalQueries.push(new configUtils.ExternalQuery("github/bar", "master"));

    fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
    setInput('config-file', 'input');

    const actualConfig = await configUtils.loadConfig();

    // Should exactly equal the object we constructed earlier
    t.deepEqual(actualConfig, expectedConfig);
  });
});

// Various "uses" fields, and the errors they should produce
const testInputs = {
  "''": configUtils.getQueryUsesBlank(),
  "foo/bar": configUtils.getQueryUsesIncorrect("foo/bar"),
  "foo/bar@v1@v2": configUtils.getQueryUsesIncorrect("foo/bar@v1@v2"),
  "foo@master": configUtils.getQueryUsesIncorrect("foo@master"),
  "https://github.com/foo/bar@master": configUtils.getQueryUsesIncorrect("https://github.com/foo/bar@master")
};

for (const [input, result] of Object.entries(testInputs)) {
  test("load invalid input - queries uses \"" + input + "\"", async t => {
    return await util.withTmpDir(async tmpDir => {
      process.env['RUNNER_WORKSPACE'] = tmpDir;
      process.env['GITHUB_WORKSPACE'] = tmpDir;

      // Invalid contents of a "queries.uses" field.
      // Should fail with the expected error message
      const inputFileContents = `
      name: my config
      queries:
        - name: foo
          uses: ` + input;

      fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
      setInput('config-file', 'input');

      try {
        await configUtils.loadConfig();
        throw new Error('loadConfig did not throw error');
      } catch (err) {
        t.deepEqual(err, new Error(result));
      }
    });
  });
}
@@ -4,8 +4,6 @@ import * as fs from 'fs';
import * as yaml from 'js-yaml';
import * as path from 'path';

import * as util from './util';

export class ExternalQuery {
  public repository: string;
  public ref: string;
@@ -28,12 +26,11 @@ export class Config {
  public addQuery(queryUses: string) {
    // The logic for parsing the string is based on what actions does for
    // parsing the 'uses' actions in the workflow file
    queryUses = queryUses.trim();

    if (queryUses === "") {
      throw new Error(getQueryUsesBlank());
      throw '"uses" value for queries cannot be blank';
    }

    // Check for the local path case before we start trying to parse the repository name
    if (queryUses.startsWith("./")) {
      this.additionalQueries.push(queryUses.slice(2));
      return;
@@ -41,7 +38,7 @@ export class Config {

    let tok = queryUses.split('@');
    if (tok.length !== 2) {
      throw new Error(getQueryUsesIncorrect(queryUses));
      throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
    }

    const ref = tok[1];
@@ -49,16 +46,12 @@ export class Config {
    // The first token is the owner
    // The second token is the repo
    // The rest is a path, if there is more than one token combine them to form the full path
    if (tok.length < 2) {
      throw new Error(getQueryUsesIncorrect(queryUses));
    }
    if (tok.length > 3) {
      tok = [tok[0], tok[1], tok.slice(2).join('/')];
    }

    // Check none of the parts of the repository name are empty
    if (tok[0].trim() === '' || tok[1].trim() === '') {
      throw new Error(getQueryUsesIncorrect(queryUses));
    if (tok.length < 2) {
      throw '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
    }

    let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
@@ -69,24 +62,10 @@ export class Config {
  }
}

export function getQueryUsesBlank(): string {
  return '"uses" value for queries cannot be blank';
}

export function getQueryUsesIncorrect(queryUses: string): string {
  return '"uses" value for queries must be a path, or owner/repo@ref \n Found: ' + queryUses;
}

export function getConfigFileOutsideWorkspaceErrorMessage(configFile: string): string {
  return 'The configuration file "' + configFile + '" is outside of the workspace';
}

export function getConfigFileDoesNotExistErrorMessage(configFile: string): string {
  return 'The configuration file "' + configFile + '" does not exist';
}
const configFolder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';

function initConfig(): Config {
  let configFile = core.getInput('config-file');
  const configFile = core.getInput('config-file');

  const config = new Config();

@@ -96,20 +75,7 @@ function initConfig(): Config {
    return config;
  }

  // Treat the config file as relative to the workspace
  const workspacePath = util.getRequiredEnvParam('GITHUB_WORKSPACE');
  configFile = path.resolve(workspacePath, configFile);

  // Error if the config file is now outside of the workspace
  if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
    throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
  }

  // Error if the file does not exist
  if (!fs.existsSync(configFile)) {
    throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
  }

  try {
    const parsedYAML = yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));

    if (parsedYAML.name && typeof parsedYAML.name === "string") {
@@ -123,7 +89,7 @@ function initConfig(): Config {
    const queries = parsedYAML.queries;
    if (queries && queries instanceof Array) {
      queries.forEach(query => {
        if (typeof query.uses === "string") {
        if (query.uses && typeof query.uses === "string") {
          config.addQuery(query.uses);
        }
      });
@@ -146,28 +112,23 @@ function initConfig(): Config {
      }
    });
  }
  } catch (err) {
    core.setFailed(err);
  }

  return config;
}

function getConfigFolder(): string {
  return util.getRequiredEnvParam('RUNNER_WORKSPACE');
}

export function getConfigFile(): string {
  return path.join(getConfigFolder(), 'config');
}

async function saveConfig(config: Config) {
  const configString = JSON.stringify(config);
  await io.mkdirP(getConfigFolder());
  fs.writeFileSync(getConfigFile(), configString, 'utf8');
  await io.mkdirP(configFolder);
  fs.writeFileSync(path.join(configFolder, 'config'), configString, 'utf8');
  core.debug('Saved config:');
  core.debug(configString);
}

export async function loadConfig(): Promise<Config> {
  const configFile = getConfigFile();
  const configFile = path.join(configFolder, 'config');
  if (fs.existsSync(configFile)) {
    const configString = fs.readFileSync(configFile, 'utf8');
    core.debug('Loaded config:');
@@ -2,7 +2,6 @@ import * as core from '@actions/core';
import * as exec from '@actions/exec';
import * as io from '@actions/io';
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';

import * as configUtils from './config-utils';
@@ -11,23 +10,6 @@ import * as sharedEnv from './shared-environment';
import * as upload_lib from './upload-lib';
import * as util from './util';

function getMemoryFlag(): string {
  let memoryToUseMegaBytes: number;
  const memoryToUseString = core.getInput("ram");
  if (memoryToUseString) {
    memoryToUseMegaBytes = Number(memoryToUseString);
    if (Number.isNaN(memoryToUseMegaBytes) || memoryToUseMegaBytes <= 0) {
      throw new Error("Invalid RAM setting \"" + memoryToUseString + "\", specified.");
    }
  } else {
    const totalMemoryBytes = os.totalmem();
    const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
    const systemReservedMemoryMegaBytes = 256;
    memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
  }
  return "--ram=" + Math.floor(memoryToUseMegaBytes);
}

async function createdDBForScannedLanguages(codeqlCmd: string, databaseFolder: string) {
  const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
  if (scannedLanguages) {
@@ -131,7 +113,6 @@ async function runQueries(codeqlCmd: string, databaseFolder: string, sarifFolder
  await exec.exec(codeqlCmd, [
    'database',
    'analyze',
    getMemoryFlag(),
    path.join(databaseFolder, database),
    '--format=sarif-latest',
    '--output=' + sarifFile,
@@ -1,6 +1,7 @@
import * as core from '@actions/core';
import * as http from '@actions/http-client';
import * as auth from '@actions/http-client/auth';
import * as io from '@actions/io';
import fileUrl from 'file-url';
import * as fs from 'fs';
import * as path from 'path';
@@ -10,6 +11,19 @@ import * as fingerprints from './fingerprints';
import * as sharedEnv from './shared-environment';
import * as util from './util';

// Construct the location of the sentinel file for detecting multiple uploads.
// The returned location should be writable.
async function getSentinelFilePath(): Promise<string> {
  // Use the temp dir instead of placing next to the sarif file because of
  // issues with docker actions. The directory containing the sarif file
  // may not be writable by us.
  const uploadsTmpDir = path.join(process.env['RUNNER_TEMP'] || '/tmp/codeql-action', 'uploads');
  await io.mkdirP(uploadsTmpDir);
  // Hash the absolute path so we'll behave correctly in the unlikely
  // scenario a file is referenced twice with different paths.
  return path.join(uploadsTmpDir, 'codeql-action-upload-sentinel');
}

// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
export function combineSarifFiles(sarifFiles: string[]): string {
@@ -114,27 +128,20 @@ export async function upload(input: string): Promise<boolean> {
  }
}

// Counts the number of results in the given SARIF file
export function countResultsInSarif(sarif: string): number {
  let numResults = 0;
  for (const run of JSON.parse(sarif).runs) {
    numResults += run.results.length;
  }
  return numResults;
}

// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
async function uploadFiles(sarifFiles: string[]): Promise<boolean> {
  core.startGroup("Uploading results");
  let succeeded = false;
  try {
    const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
    if (process.env[sentinelEnvVar]) {
      core.error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
    // Check if an upload has happened before. If so then abort.
    // This is intended to catch when the finish and upload-sarif actions
    // are used together, and then the upload-sarif action is invoked twice.
    const sentinelFile = await getSentinelFilePath();
    if (fs.existsSync(sentinelFile)) {
      core.info("Aborting as an upload has already happened from this job");
      return false;
    }
    core.exportVariable(sentinelEnvVar, sentinelEnvVar);

    const commitOid = util.getRequiredEnvParam('GITHUB_SHA');
    const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
@@ -177,14 +184,12 @@ async function uploadFiles(sarifFiles: string[]): Promise<boolean> {
      "tool_names": toolNames,
    });

    // Log some useful debug info about the info
    core.debug("Raw upload size: " + sarifPayload.length + " bytes");
    core.debug("Base64 zipped upload size: " + zipped_sarif.length + " bytes");
    core.debug("Number of results in upload: " + countResultsInSarif(sarifPayload));

    // Make the upload
    succeeded = await uploadPayload(payload);

    // Mark that we have made an upload
    fs.writeFileSync(sentinelFile, '');

  } catch (error) {
    core.setFailed(error.message);
  }
@@ -377,9 +377,8 @@ export function getToolNames(sarifContents: string): string[] {

// Creates a random temporary directory, runs the given body, and then deletes the directory.
// Mostly intended for use within tests.
export async function withTmpDir<T>(body: (tmpDir: string) => Promise<T>): Promise<T> {
export async function withTmpDir(body: (tmpDir: string) => Promise<void>) {
  const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
  const result = await body(tmpDir);
  await body(tmpDir);
  fs.rmdirSync(tmpDir, { recursive: true });
  return result;
}
@@ -3,27 +3,15 @@ name: Use custom queries
disable-default-queries: true

queries:
  # Query suites
  - name: Select a query suite
    uses: ./codeql-qlpacks/complex-python-qlpack/rootAndBar.qls
  # QL pack subset
  - name: Select a ql file
    uses: ./codeql-qlpacks/complex-javascript-qlpack/show_ifs.ql
  - name: Select a subfolder
    uses: ./codeql-qlpacks/complex-javascript-qlpack/foo
  - name: Select a folder with two subfolders
    uses: ./codeql-qlpacks/complex-javascript-qlpack/foo2
  # Inrepo QL pack
  - name: Select an inrepo ql pack
    uses: ./codeql-qlpacks/csharp-qlpack
  - name: Java queries
    uses: ./codeql-qlpacks/java-qlpack
  # External QL packs
  - name: Go queries
    uses: Anthophila/go-querypack@master
  - name: Cpp queries
    uses: Anthophila/cpp-querypack@second-branch
  - name: Javascript queries
    uses: Anthophila/javascript-querypack/show_ifs2.ql@master
  - name: Python queries
    uses: Anthophila/python-querypack/show_ifs2.ql@second-branch
  - name: Additional C++ queries
    uses: github/codeql/cpp/ql/src/codeql-suites/cpp-lgtm.qls@lgtm.com
  - name: Additional C# queries
    uses: github/codeql/csharp/ql/src/codeql-suites/csharp-lgtm.qls@lgtm.com
  - name: Additional Go queries
    uses: github/codeql-go/ql/src/codeql-suites/go-lgtm.qls@lgtm.com
  - name: Additional Java queries
    uses: github/codeql/java/ql/src/codeql-suites/java-lgtm.qls@lgtm.com
  - name: Additional Javascript queries
    uses: github/codeql/javascript/ql/src/codeql-suites/javascript-lgtm.qls@lgtm.com
  - name: Additional Python queries
    uses: github/codeql/python/ql/src/codeql-suites/python-lgtm.qls@lgtm.com