Mirror of https://github.com/github/codeql-action.git (synced 2025-12-26 17:20:10 +08:00)

Compare commits: codeql-bun...v1.0.25 (46 commits)
Commits (SHA1):
546b30f35a, d1dde03d7a, f44219c94b, bdaac951f7, a82f53a364, f721f011bf,
c82e09aa41, 460d053698, 3bf14e85d8, 13a9d6c442, dd65833ab6, c2d9e4b48f,
e095058bfa, 2c99f99c4a, bcd7e6896f, 0b242db78f, c897659213, 8b902e1723,
26567f6a49, dbf7ac4b37, 077f7b2532, a392055010, 0aea878963, bca71988d3,
02e1cdcd36, 4860ed1ad4, 3e36cddb07, b9bd459b70, 215c4f5ff5, 4eef7ef32c,
e0b9b9a248, 823bb21bbb, 49fc4c9b40, 21a786fda0, 316ad9d919, a627e9fa50,
160021fe53, 3f2269bf58, 2ecc17d74f, 9b506fed7c, 2803f4a792, 720bf9d157,
bbf0a22e84, d7b5c618a4, 37a4db94ad, 6a98a4b500
.github/workflows/__debug-artifacts.yml (generated, vendored): 2 changes
@@ -41,6 +41,8 @@ jobs:
         uses: ./.github/prepare-test
         with:
           version: ${{ matrix.version }}
+      - name: Initialize dotnet
+        run: dotnet restore
       - uses: ./../action/init
         with:
           tools: ${{ steps.prepare-test.outputs.tools-url }}

.github/workflows/__go-custom-queries.yml (generated, vendored): 2 changes
@@ -44,6 +44,8 @@ jobs:
         uses: ./.github/prepare-test
         with:
           version: ${{ matrix.version }}
+      - name: Initialize dotnet
+        run: dotnet restore
       - uses: actions/setup-go@v2
         with:
           go-version: ^1.13.1

.github/workflows/__multi-language-autodetect.yml (generated, vendored): 2 changes
@@ -41,6 +41,8 @@ jobs:
         uses: ./.github/prepare-test
         with:
           version: ${{ matrix.version }}
+      - name: Initialize dotnet
+        run: dotnet restore
       - uses: ./../action/init
         with:
           db-location: ${{ runner.temp }}/customDbLocation

.github/workflows/__packaging-config-inputs-js.yml (generated, vendored): 2 changes
@@ -35,6 +35,8 @@ jobs:
         uses: ./.github/prepare-test
         with:
           version: ${{ matrix.version }}
+      - name: Initialize dotnet
+        run: dotnet restore
       - uses: ./../action/init
         with:
           config-file: .github/codeql/codeql-config-packaging3.yml

.github/workflows/__packaging-config-js.yml (generated, vendored): 2 changes
@@ -35,6 +35,8 @@ jobs:
         uses: ./.github/prepare-test
         with:
           version: ${{ matrix.version }}
+      - name: Initialize dotnet
+        run: dotnet restore
       - uses: ./../action/init
         with:
           config-file: .github/codeql/codeql-config-packaging.yml

.github/workflows/__packaging-inputs-js.yml (generated, vendored): 2 changes
@@ -35,6 +35,8 @@ jobs:
         uses: ./.github/prepare-test
         with:
           version: ${{ matrix.version }}
+      - name: Initialize dotnet
+        run: dotnet restore
       - uses: ./../action/init
         with:
           config-file: .github/codeql/codeql-config-packaging2.yml

.github/workflows/__remote-config.yml (generated, vendored): 2 changes
@@ -44,6 +44,8 @@ jobs:
         uses: ./.github/prepare-test
         with:
           version: ${{ matrix.version }}
+      - name: Initialize dotnet
+        run: dotnet restore
       - uses: ./../action/init
         with:
           tools: ${{ steps.prepare-test.outputs.tools-url }}

.github/workflows/__split-workflow.yml (generated, vendored): 2 changes
@@ -35,6 +35,8 @@ jobs:
         uses: ./.github/prepare-test
         with:
           version: ${{ matrix.version }}
+      - name: Initialize dotnet
+        run: dotnet restore
       - uses: ./../action/init
         with:
           config-file: .github/codeql/codeql-config-packaging3.yml

.github/workflows/__test-local-codeql.yml (generated, vendored): 2 changes
@@ -35,6 +35,8 @@ jobs:
         uses: ./.github/prepare-test
         with:
           version: ${{ matrix.version }}
+      - name: Initialize dotnet
+        run: dotnet restore
       - name: Fetch a CodeQL bundle
         shell: bash
         env:

.github/workflows/__unset-environment.yml (generated, vendored): 2 changes
@@ -41,6 +41,8 @@ jobs:
         uses: ./.github/prepare-test
         with:
           version: ${{ matrix.version }}
+      - name: Initialize dotnet
+        run: dotnet restore
       - uses: ./../action/init
         with:
           db-location: ${{ runner.temp }}/customDbLocation

.github/workflows/pr-checks.yml (vendored): 9 changes
@@ -163,6 +163,9 @@ jobs:
           cd ../action/runner
           npm install
           npm run build-runner
 
+      - name: Initialize dotnet
+        run: dotnet restore
+
       - name: Run init
         run: |
@@ -200,6 +203,9 @@ jobs:
           cd ../action/runner
           npm install
           npm run build-runner
 
+      - name: Initialize dotnet
+        run: dotnet restore
+
       - name: Run init
         run: |
@@ -246,6 +252,9 @@ jobs:
           npm install
           npm run build-runner
 
+      - name: Initialize dotnet
+        run: dotnet restore
+
       - name: Run init
         run: |
           ../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}

.github/workflows/update-dependencies.yml (vendored): 2 changes
@@ -11,8 +11,6 @@ jobs:
     steps:
       - name: Checkout repository
         uses: actions/checkout@v2
-        with:
-          persist-credentials: false
 
       - name: Remove PR label
         env:

CHANGELOG.md: 11 changes
@@ -1,9 +1,18 @@
 # CodeQL Action and CodeQL Runner Changelog
 
 ## [UNRELEASED]
 
+## 1.0.25 - 06 Dec 2021
+
+No user facing changes.
+
+## 1.0.24 - 23 Nov 2021
+
+- Update default CodeQL bundle version to 2.7.2. [#827](https://github.com/github/codeql-action/pull/827)
+
+## 1.0.23 - 16 Nov 2021
+
 - The `upload-sarif` action now allows multiple uploads in a single job, as long as they have different categories. [#801](https://github.com/github/codeql-action/pull/801)
 - Update default CodeQL bundle version to 2.7.1. [#816](https://github.com/github/codeql-action/pull/816)
 
 ## 1.0.22 - 04 Nov 2021
 
 - The `init` step of the Action now supports `ram` and `threads` inputs to limit resource use of CodeQL extractors. These inputs also serve as defaults to the subsequent `analyze` step, which finalizes the database and executes queries. [#738](https://github.com/github/codeql-action/pull/738)

(file header lost in mirroring)
@@ -52,6 +52,10 @@ inputs:
     description: Whether to upload the resulting CodeQL database
     required: false
     default: "true"
+  wait-for-processing:
+    description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
+    required: true
+    default: "false"
   token:
     default: ${{ github.token }}
   matrix:

lib/actions-util.js (generated): 1 change
@@ -98,6 +98,7 @@ const getCommitOid = async function (ref = "HEAD") {
     }
     catch (e) {
         core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
+        core.info(e.stack || "NO STACK");
         return (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
     }
 };

File diff suppressed because one or more lines are too long
lib/analyze-action.js (generated): 15 changes
@@ -50,7 +50,7 @@ async function sendStatusReport(startedAt, stats, error) {
 exports.sendStatusReport = sendStatusReport;
 async function run() {
     const startedAt = new Date();
-    let uploadStats = undefined;
+    let uploadResult = undefined;
     let runStats = undefined;
     let config = undefined;
     util.initializeEnvironment(util.Mode.actions, pkg.version);
@@ -105,13 +105,17 @@ async function run() {
     }
     core.setOutput("db-locations", dbLocations);
     if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
-        uploadStats = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
+        uploadResult = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
     }
     else {
        logger.info("Not uploading results");
     }
     const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
     await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger); // Possibly upload the database bundles for remote queries
+    if (uploadResult !== undefined &&
+        actionsUtil.getRequiredInput("wait-for-processing") === "true") {
+        await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
+    }
     if (config.debugMode) {
         // Upload the database bundles as an Actions artifact for debugging
         const toUpload = [];
@@ -156,8 +160,11 @@ async function run() {
             }
         }
     }
-    if (runStats && uploadStats) {
-        await sendStatusReport(startedAt, { ...runStats, ...uploadStats });
+    if (runStats && uploadResult) {
+        await sendStatusReport(startedAt, {
+            ...runStats,
+            ...uploadResult.statusReport,
+        });
     }
     else if (runStats) {
         await sendStatusReport(startedAt, { ...runStats });

File diff suppressed because one or more lines are too long
lib/database-upload.js (generated): 35 changes
@@ -42,11 +42,13 @@ async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
         return;
     }
     const client = (0, api_client_1.getApiClient)(apiDetails);
+    let useUploadDomain;
     try {
-        await client.request("GET /repos/:owner/:repo/code-scanning/codeql/databases", {
+        const response = await client.request("GET /repos/:owner/:repo/code-scanning/codeql/databases", {
             owner: repositoryNwo.owner,
             repo: repositoryNwo.repo,
         });
+        useUploadDomain = response.data["uploads_domain_enabled"];
     }
     catch (e) {
         if (util.isHTTPError(e) && e.status === 404) {
@@ -60,15 +62,32 @@ async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
     }
     const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
     for (const language of config.languages) {
-        // Upload the database bundle
+        // Upload the database bundle.
+        // Although we are uploading arbitrary file contents to the API, it's worth
+        // noting that it's the API's job to validate that the contents is acceptable.
+        // This API method is available to anyone with write access to the repo.
         const payload = fs.readFileSync(await (0, util_1.bundleDb)(config, language, codeql));
         try {
-            await client.request(`PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language`, {
-                owner: repositoryNwo.owner,
-                repo: repositoryNwo.repo,
-                language,
-                data: payload,
-            });
+            if (useUploadDomain) {
+                await client.request(`POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name`, {
+                    owner: repositoryNwo.owner,
+                    repo: repositoryNwo.repo,
+                    language,
+                    name: `${language}-database`,
+                    data: payload,
+                    headers: {
+                        authorization: `token ${apiDetails.auth}`,
+                    },
+                });
+            }
+            else {
+                await client.request(`PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language`, {
+                    owner: repositoryNwo.owner,
+                    repo: repositoryNwo.repo,
+                    language,
+                    data: payload,
+                });
+            }
             logger.debug(`Successfully uploaded database for ${language}`);
         }
         catch (e) {

lib/database-upload.js.map (generated): source map regenerated; contents omitted.

lib/database-upload.test.js (generated): 42 changes
@@ -81,19 +81,29 @@ function getRecordingLogger(messages) {
         endGroup: () => undefined,
     };
 }
-function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
+function mockHttpRequests(optInStatusCode, useUploadDomain, databaseUploadStatusCode) {
     // Passing an auth token is required, so we just use a dummy value
     const client = github.getOctokit("123");
     const requestSpy = sinon.stub(client, "request");
     const optInSpy = requestSpy.withArgs("GET /repos/:owner/:repo/code-scanning/codeql/databases");
     if (optInStatusCode < 300) {
-        optInSpy.resolves(undefined);
+        optInSpy.resolves({
+            status: optInStatusCode,
+            data: {
+                useUploadDomain,
+            },
+            headers: {},
+            url: "GET /repos/:owner/:repo/code-scanning/codeql/databases",
+        });
     }
     else {
         optInSpy.throws(new util_1.HTTPError("some error message", optInStatusCode));
     }
     if (databaseUploadStatusCode !== undefined) {
-        const databaseUploadSpy = requestSpy.withArgs("PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language");
+        const url = useUploadDomain
+            ? "POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name"
+            : "PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language";
+        const databaseUploadSpy = requestSpy.withArgs(url);
        if (databaseUploadStatusCode < 300) {
            databaseUploadSpy.resolves(undefined);
        }
@@ -213,7 +223,7 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
         .withArgs("upload-database")
         .returns("true");
     sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
-    mockHttpRequests(204, 500);
+    mockHttpRequests(200, false, 500);
     (0, codeql_1.setCodeQL)({
         async databaseBundle(_, outputFilePath) {
             fs.writeFileSync(outputFilePath, "");
@@ -226,7 +236,7 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
         "Failed to upload database for javascript: Error: some error message") !== undefined);
     });
 });
-(0, ava_1.default)("Successfully uploading a database", async (t) => {
+(0, ava_1.default)("Successfully uploading a database to api.github.com", async (t) => {
     await (0, util_1.withTmpDir)(async (tmpDir) => {
         (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
         sinon
@@ -234,7 +244,27 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
         .withArgs("upload-database")
         .returns("true");
     sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
-    mockHttpRequests(204, 201);
+    mockHttpRequests(200, false, 201);
     (0, codeql_1.setCodeQL)({
         async databaseBundle(_, outputFilePath) {
             fs.writeFileSync(outputFilePath, "");
         },
     });
     const loggedMessages = [];
     await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
     t.assert(loggedMessages.find((v) => v.type === "debug" &&
         v.message === "Successfully uploaded database for javascript") !== undefined);
     });
 });
+(0, ava_1.default)("Successfully uploading a database to uploads.github.com", async (t) => {
+    await (0, util_1.withTmpDir)(async (tmpDir) => {
+        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
+        sinon
+            .stub(actionsUtil, "getRequiredInput")
+            .withArgs("upload-database")
+            .returns("true");
+        sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
+        mockHttpRequests(200, true, 201);
+        (0, codeql_1.setCodeQL)({
+            async databaseBundle(_, outputFilePath) {
+                fs.writeFileSync(outputFilePath, "");

File diff suppressed because one or more lines are too long
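The mocking above hinges on one sinon feature: a single stub of `client.request` can return a different canned response for each route string it is called with. A minimal, self-contained sketch of that idea (the object shape and route strings mirror the test; the `client` object itself is an illustrative stand-in, not the action's real Octokit client):

```typescript
import * as sinon from "sinon";

// An octokit-like object with a single request method we can stub.
const client = {
  request: async (_route: string, _params?: object): Promise<unknown> => ({}),
};

const requestSpy = sinon.stub(client, "request");

// The opt-in check succeeds and reports that the uploads domain is enabled.
requestSpy
  .withArgs("GET /repos/:owner/:repo/code-scanning/codeql/databases")
  .resolves({ status: 200, data: { uploads_domain_enabled: true } });

// The database upload itself fails, so error-handling paths can be exercised.
requestSpy
  .withArgs("PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language")
  .throws(new Error("some error message"));
```

Calls that do not match any `withArgs` clause fall through to the base stub, which is why the test only needs to register the routes it cares about.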
(file header lost in mirroring)
@@ -1,3 +1,3 @@
 {
-  "bundleVersion": "codeql-bundle-20211025"
+  "bundleVersion": "codeql-bundle-20211122"
 }

lib/tracer-config.js (generated): 18 changes
@@ -182,15 +182,15 @@ async function getCombinedTracerConfig(config, codeql) {
         tracedLanguageConfigs[language] = await getTracerConfigForLanguage(codeql, config, language);
     }
     mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
-}
-// Add a couple more variables
-mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
-const codeQLDir = path.dirname(codeql.getPath());
-if (process.platform === "darwin") {
-    mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
-}
-else if (process.platform !== "win32") {
-    mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
+    // Add a couple more variables
+    mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
+    const codeQLDir = path.dirname(codeql.getPath());
+    if (process.platform === "darwin") {
+        mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
+    }
+    else if (process.platform !== "win32") {
+        mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
+    }
 }
 // On macos it's necessary to prefix the build command with the runner executable
 // on order to trace when System Integrity Protection is enabled.

File diff suppressed because one or more lines are too long
lib/upload-lib.js (generated): 98 changes
@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromRunner = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
+exports.validateUniqueCategory = exports.waitForProcessing = exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromRunner = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const zlib_1 = __importDefault(require("zlib"));
@@ -105,6 +105,7 @@ async function uploadPayload(payload, repositoryNwo, apiDetails, logger) {
     });
     logger.debug(`response status: ${response.status}`);
     logger.info("Successfully uploaded results");
+    return response.data.id;
 }
 // Recursively walks a directory and returns all SARIF files it finds.
 // Does not follow symlinks.
@@ -243,14 +244,7 @@ exports.buildPayload = buildPayload;
 async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, sourceRoot, environment, gitHubVersion, apiDetails, logger) {
     logger.startGroup("Uploading results");
     logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
-    if (util.isActions()) {
-        // This check only works on actions as env vars don't persist between calls to the runner
-        const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
-        if (process.env[sentinelEnvVar]) {
-            throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
-        }
-        core.exportVariable(sentinelEnvVar, sentinelEnvVar);
-    }
+    validateUniqueCategory(category);
     // Validate that the files we were asked to upload are all valid SARIF files
     for (const file of sarifFiles) {
         validateSarifFileSchema(file, logger);
@@ -270,12 +264,90 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
     const numResultInSarif = countResultsInSarif(sarifPayload);
     logger.debug(`Number of results in upload: ${numResultInSarif}`);
     // Make the upload
-    await uploadPayload(payload, repositoryNwo, apiDetails, logger);
+    const sarifID = await uploadPayload(payload, repositoryNwo, apiDetails, logger);
     logger.endGroup();
     return {
-        raw_upload_size_bytes: rawUploadSizeBytes,
-        zipped_upload_size_bytes: zippedUploadSizeBytes,
-        num_results_in_sarif: numResultInSarif,
+        statusReport: {
+            raw_upload_size_bytes: rawUploadSizeBytes,
+            zipped_upload_size_bytes: zippedUploadSizeBytes,
+            num_results_in_sarif: numResultInSarif,
+        },
+        sarifID,
     };
 }
+const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
+const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
+// Waits until either the analysis is successfully processed, a processing error is reported, or STATUS_CHECK_TIMEOUT_MILLISECONDS elapses.
+async function waitForProcessing(repositoryNwo, sarifID, apiDetails, logger) {
+    logger.startGroup("Waiting for processing to finish");
+    const client = api.getApiClient(apiDetails);
+    const statusCheckingStarted = Date.now();
+    // eslint-disable-next-line no-constant-condition
+    while (true) {
+        if (Date.now() >
+            statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
+            // If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
+            // It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
+            logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
+            break;
+        }
+        try {
+            const response = await client.request("GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id", {
+                owner: repositoryNwo.owner,
+                repo: repositoryNwo.repo,
+                sarif_id: sarifID,
+            });
+            const status = response.data.processing_status;
+            logger.info(`Analysis upload status is ${status}.`);
+            if (status === "complete") {
+                break;
+            }
+            else if (status === "failed") {
+                throw new Error(`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`);
+            }
+        }
+        catch (e) {
+            if (util.isHTTPError(e)) {
+                switch (e.status) {
+                    case 404:
+                        logger.debug("Analysis is not found yet...");
+                        break; // Note this breaks from the case statement, not the outer loop.
+                    default:
+                        throw e;
+                }
+            }
+            else {
+                throw e;
+            }
+        }
+        await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
+    }
+    logger.endGroup();
+}
+exports.waitForProcessing = waitForProcessing;
+function validateUniqueCategory(category) {
+    if (util.isActions()) {
+        // This check only works on actions as env vars don't persist between calls to the runner
+        const sentinelEnvVar = `CODEQL_UPLOAD_SARIF${category ? `_${sanitize(category)}` : ""}`;
+        if (process.env[sentinelEnvVar]) {
+            throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per category. " +
+                "Please specify a unique `category` to call this action multiple times. " +
+                `Category: ${category ? category : "(none)"}`);
+        }
+        core.exportVariable(sentinelEnvVar, sentinelEnvVar);
+    }
+}
+exports.validateUniqueCategory = validateUniqueCategory;
+/**
+ * Santizes a string to be used as an environment variable name.
+ * This will replace all non-alphanumeric characters with underscores.
+ * There could still be some false category clashes if two uploads
+ * occur that differ only in their non-alphanumeric characters. This is
+ * unlikely.
+ *
+ * @param str the initial value to sanitize
+ */
+function sanitize(str) {
+    return str.replace(/[^a-zA-Z0-9_]/g, "_");
+}
 //# sourceMappingURL=upload-lib.js.map

File diff suppressed because one or more lines are too long
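The `waitForProcessing` function added above is a standard poll-with-deadline loop: check the processing status every 5 seconds, give up softly after 2 minutes, and treat "not found yet" as a retryable state rather than an error. A condensed sketch of that pattern, decoupled from the action's API client (the `fetchStatus` callback and default constants are illustrative assumptions, not the action's real interface):

```typescript
// Polls fetchStatus until it reports "complete", throws on "failed", and
// logs-and-returns if the deadline passes, mirroring the soft timeout above.
async function pollUntilProcessed(
  fetchStatus: () => Promise<"pending" | "complete" | "failed">,
  frequencyMs = 5_000,
  timeoutMs = 120_000
): Promise<void> {
  const started = Date.now();
  while (Date.now() <= started + timeoutMs) {
    const status = await fetchStatus();
    if (status === "complete") {
      return;
    }
    if (status === "failed") {
      throw new Error("SARIF processing failed");
    }
    // Still pending: sleep before the next check.
    await new Promise((resolve) => setTimeout(resolve, frequencyMs));
  }
  // Like the action, time out without failing the job: processing may still
  // finish later, but it is not worth spending more runner time waiting.
  console.warn("Timed out waiting for analysis to finish processing. Continuing.");
}
```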
lib/upload-lib.test.js (generated): 16 changes
@@ -113,4 +113,20 @@ ava_1.default.beforeEach(() => {
     modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, undefined, analysisKey, '{"os": "linux", "language": "javascript"}');
     t.deepEqual(modifiedSarif, expectedSarif);
 });
+(0, ava_1.default)("validateUniqueCategory", (t) => {
+    t.notThrows(() => uploadLib.validateUniqueCategory(undefined));
+    t.throws(() => uploadLib.validateUniqueCategory(undefined));
+    t.notThrows(() => uploadLib.validateUniqueCategory("abc"));
+    t.throws(() => uploadLib.validateUniqueCategory("abc"));
+    t.notThrows(() => uploadLib.validateUniqueCategory("def"));
+    t.throws(() => uploadLib.validateUniqueCategory("def"));
+    // Our category sanitization is not perfect. Here are some examples
+    // of where we see false clashes
+    t.notThrows(() => uploadLib.validateUniqueCategory("abc/def"));
+    t.throws(() => uploadLib.validateUniqueCategory("abc@def"));
+    t.throws(() => uploadLib.validateUniqueCategory("abc_def"));
+    t.throws(() => uploadLib.validateUniqueCategory("abc def"));
+    // this one is fine
+    t.notThrows(() => uploadLib.validateUniqueCategory("abc_ def"));
+});
 //# sourceMappingURL=upload-lib.test.js.map

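The clashes this test expects follow directly from how categories are sanitized into environment-variable names. A small sketch showing which categories collapse to the same sentinel (the `sanitize` body is copied from the diff above; the driver loop is only for illustration):

```typescript
// Copied from the diff: every non-alphanumeric character becomes "_".
function sanitize(str: string): string {
  return str.replace(/[^a-zA-Z0-9_]/g, "_");
}

for (const category of ["abc/def", "abc@def", "abc_def", "abc def", "abc_ def"]) {
  console.log(`${category} -> CODEQL_UPLOAD_SARIF_${sanitize(category)}`);
}
// "abc/def", "abc@def", "abc_def" and "abc def" all map to
// CODEQL_UPLOAD_SARIF_abc_def, so the second and later ones throw.
// "abc_ def" maps to CODEQL_UPLOAD_SARIF_abc__def (two underscores,
// one original plus one from the space), so it does not clash.
```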
File diff suppressed because one or more lines are too long
lib/upload-sarif-action.js (generated): 8 changes
@@ -22,6 +22,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 const core = __importStar(require("@actions/core"));
 const actionsUtil = __importStar(require("./actions-util"));
 const logging_1 = require("./logging");
+const repository_1 = require("./repository");
 const upload_lib = __importStar(require("./upload-lib"));
 const util_1 = require("./util");
 // eslint-disable-next-line import/no-commonjs
@@ -46,8 +47,11 @@ async function run() {
         url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
     };
     const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
-    const uploadStats = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)());
-    await sendSuccessStatusReport(startedAt, uploadStats);
+    const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)());
+    if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
+        await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
+    }
+    await sendSuccessStatusReport(startedAt, uploadResult.statusReport);
 }
 catch (error) {
     const message = error instanceof Error ? error.message : String(error);

lib/upload-sarif-action.js.map (generated): source map regenerated; contents omitted.

lib/util.js (generated): 16 changes
@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
+exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
 const fs = __importStar(require("fs"));
 const os = __importStar(require("os"));
 const path = __importStar(require("path"));
@@ -486,10 +486,20 @@ exports.codeQlVersionAbove = codeQlVersionAbove;
 async function bundleDb(config, language, codeql) {
     const databasePath = getCodeQLDatabasePath(config, language);
     const databaseBundlePath = path.resolve(config.dbLocation, `${databasePath}.zip`);
-    if (!fs.existsSync(databaseBundlePath)) {
-        await codeql.databaseBundle(databasePath, databaseBundlePath);
+    // For a tiny bit of added safety, delete the file if it exists.
+    // The file is probably from an earlier call to this function, either
+    // as part of this action step or a previous one, but it could also be
+    // from somewhere else or someone trying to make the action upload a
+    // non-database file.
+    if (fs.existsSync(databaseBundlePath)) {
+        fs.rmSync(databaseBundlePath, { recursive: true });
     }
+    await codeql.databaseBundle(databasePath, databaseBundlePath);
     return databaseBundlePath;
 }
 exports.bundleDb = bundleDb;
+async function delay(milliseconds) {
+    return new Promise((resolve) => setTimeout(resolve, milliseconds));
+}
+exports.delay = delay;
 //# sourceMappingURL=util.js.map

File diff suppressed because one or more lines are too long
node_modules/.package-lock.json (generated, vendored): 2 changes
@@ -1,6 +1,6 @@
 {
   "name": "codeql",
-  "version": "1.0.23",
+  "version": "1.0.25",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {

package-lock.json (generated): 4 changes
@@ -1,12 +1,12 @@
 {
   "name": "codeql",
-  "version": "1.0.23",
+  "version": "1.0.25",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "codeql",
-      "version": "1.0.23",
+      "version": "1.0.25",
       "license": "MIT",
       "dependencies": {
         "@actions/artifact": "^0.5.2",

(file header lost in mirroring)
@@ -1,6 +1,6 @@
 {
   "name": "codeql",
-  "version": "1.0.23",
+  "version": "1.0.25",
   "private": true,
   "description": "CodeQL action",
   "scripts": {

@@ -2,6 +2,8 @@ name: "Debug artifact upload"
|
||||
description: "Checks that debugging artifacts are correctly uploaded"
|
||||
os: ["ubuntu-latest", "macos-latest"]
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
name: "Go: Custom queries"
|
||||
description: "Checks that Go works in conjunction with a config file specifying custom queries"
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- uses: actions/setup-go@v2
|
||||
with:
|
||||
go-version: "^1.13.1"
|
||||
|
||||
@@ -2,6 +2,8 @@ name: "Multi-language repository"
|
||||
description: "An end-to-end integration test of a multi-language repository using automatic language detection"
|
||||
os: ["ubuntu-latest", "macos-latest"]
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
db-location: "${{ runner.temp }}/customDbLocation"
|
||||
|
||||
@@ -3,6 +3,8 @@ description: "Checks that specifying packages using a combination of a config fi
|
||||
versions: ["nightly-20210831"] # This CLI version is known to work with package used in this test
|
||||
os: ["ubuntu-latest", "macos-latest"]
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: ".github/codeql/codeql-config-packaging3.yml"
|
||||
|
||||
@@ -3,6 +3,8 @@ description: "Checks that specifying packages using only a config file works"
|
||||
versions: ["nightly-20210831"] # This CLI version is known to work with package used in this test
|
||||
os: ["ubuntu-latest", "macos-latest"]
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: ".github/codeql/codeql-config-packaging.yml"
|
||||
|
||||
@@ -3,6 +3,8 @@ description: "Checks that specifying packages using the input to the Action work
|
||||
versions: ["nightly-20210831"] # This CLI version is known to work with package used in this test
|
||||
os: ["ubuntu-latest", "macos-latest"]
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: ".github/codeql/codeql-config-packaging2.yml"
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
name: "Remote config file"
|
||||
description: "Checks that specifying packages using only a config file works"
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
@@ -3,6 +3,8 @@ description: "Tests a split-up workflow in which we first build a database and l
|
||||
versions: ["nightly-20210831"] # This CLI version is known to work with package used in this test
|
||||
os: ["ubuntu-latest", "macos-latest"]
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: ".github/codeql/codeql-config-packaging3.yml"
|
||||
|
||||
@@ -3,6 +3,8 @@ description: "Tests using a CodeQL bundle from a local file rather than a URL"
|
||||
versions: ["nightly-latest"]
|
||||
os: ["ubuntu-latest"]
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- name: Fetch a CodeQL bundle
|
||||
shell: bash
|
||||
env:
|
||||
|
||||
@@ -2,6 +2,8 @@ name: "Test unsetting environment variables"
|
||||
description: "An end-to-end integration test that unsets some environment variables"
|
||||
os: ["ubuntu-latest"]
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
db-location: "${{ runner.temp }}/customDbLocation"
|
||||
|
||||
runner/package-lock.json (generated): 2 changes
@@ -1,6 +1,6 @@
 {
   "name": "codeql-runner",
-  "version": "1.0.23",
+  "version": "1.0.25",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {

(file header lost in mirroring)
@@ -1,6 +1,6 @@
 {
   "name": "codeql-runner",
-  "version": "1.0.23",
+  "version": "1.0.25",
   "private": true,
   "description": "CodeQL runner",
   "scripts": {

(file header lost in mirroring)
@@ -85,6 +85,7 @@ export const getCommitOid = async function (ref = "HEAD"): Promise<string> {
     core.info(
       `Failed to call git to get current commit. Continuing with data from environment: ${e}`
     );
+    core.info((e as Error).stack || "NO STACK");
     return getRequiredEnvParam("GITHUB_SHA");
   }
 };

@@ -18,7 +18,7 @@ import { uploadDatabases } from "./database-upload";
|
||||
import { getActionsLogger } from "./logging";
|
||||
import { parseRepositoryNwo } from "./repository";
|
||||
import * as upload_lib from "./upload-lib";
|
||||
import { UploadStatusReport } from "./upload-lib";
|
||||
import { UploadResult } from "./upload-lib";
|
||||
import * as util from "./util";
|
||||
import { bundleDb, codeQlVersionAbove, DEBUG_ARTIFACT_NAME } from "./util";
|
||||
|
||||
@@ -58,7 +58,7 @@ export async function sendStatusReport(
 
 async function run() {
   const startedAt = new Date();
-  let uploadStats: UploadStatusReport | undefined = undefined;
+  let uploadResult: UploadResult | undefined = undefined;
   let runStats: QueriesStatusReport | undefined = undefined;
   let config: Config | undefined = undefined;
   util.initializeEnvironment(util.Mode.actions, pkg.version);
@@ -163,7 +163,7 @@ async function run() {
   core.setOutput("db-locations", dbLocations);
 
   if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
-    uploadStats = await upload_lib.uploadFromActions(
+    uploadResult = await upload_lib.uploadFromActions(
       outputDir,
       config.gitHubVersion,
       apiDetails,
@@ -178,6 +178,18 @@ async function run() {
   );
   await uploadDatabases(repositoryNwo, config, apiDetails, logger); // Possibly upload the database bundles for remote queries
 
+  if (
+    uploadResult !== undefined &&
+    actionsUtil.getRequiredInput("wait-for-processing") === "true"
+  ) {
+    await upload_lib.waitForProcessing(
+      parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")),
+      uploadResult.sarifID,
+      apiDetails,
+      getActionsLogger()
+    );
+  }
+
   if (config.debugMode) {
     // Upload the database bundles as an Actions artifact for debugging
     const toUpload: string[] = [];
@@ -227,8 +239,11 @@ async function run() {
     }
   }
 
-  if (runStats && uploadStats) {
-    await sendStatusReport(startedAt, { ...runStats, ...uploadStats });
+  if (runStats && uploadResult) {
+    await sendStatusReport(startedAt, {
+      ...runStats,
+      ...uploadResult.statusReport,
+    });
   } else if (runStats) {
     await sendStatusReport(startedAt, { ...runStats });
   } else {

(file header lost in mirroring)
@@ -82,6 +82,7 @@ function getRecordingLogger(messages: LoggedMessage[]): Logger {
 
 function mockHttpRequests(
   optInStatusCode: number,
+  useUploadDomain?: boolean,
   databaseUploadStatusCode?: number
 ) {
   // Passing an auth token is required, so we just use a dummy value
@@ -93,15 +94,23 @@ function mockHttpRequests(
     "GET /repos/:owner/:repo/code-scanning/codeql/databases"
   );
   if (optInStatusCode < 300) {
-    optInSpy.resolves(undefined);
+    optInSpy.resolves({
+      status: optInStatusCode,
+      data: {
+        useUploadDomain,
+      },
+      headers: {},
+      url: "GET /repos/:owner/:repo/code-scanning/codeql/databases",
+    });
   } else {
     optInSpy.throws(new HTTPError("some error message", optInStatusCode));
   }
 
   if (databaseUploadStatusCode !== undefined) {
-    const databaseUploadSpy = requestSpy.withArgs(
-      "PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language"
-    );
+    const url = useUploadDomain
+      ? "POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name"
+      : "PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language";
+    const databaseUploadSpy = requestSpy.withArgs(url);
     if (databaseUploadStatusCode < 300) {
       databaseUploadSpy.resolves(undefined);
     } else {
@@ -303,7 +312,7 @@ test("Don't crash if uploading a database fails", async (t) => {
     .returns("true");
   sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
 
-  mockHttpRequests(204, 500);
+  mockHttpRequests(200, false, 500);
 
   setCodeQL({
     async databaseBundle(_: string, outputFilePath: string) {
@@ -329,7 +338,7 @@ test("Don't crash if uploading a database fails", async (t) => {
   });
 });
 
-test("Successfully uploading a database", async (t) => {
+test("Successfully uploading a database to api.github.com", async (t) => {
   await withTmpDir(async (tmpDir) => {
     setupActionsVars(tmpDir, tmpDir);
     sinon
@@ -338,7 +347,41 @@ test("Successfully uploading a database", async (t) => {
       .withArgs("upload-database")
       .returns("true");
     sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
 
-    mockHttpRequests(204, 201);
+    mockHttpRequests(200, false, 201);
 
     setCodeQL({
       async databaseBundle(_: string, outputFilePath: string) {
         fs.writeFileSync(outputFilePath, "");
       },
     });
 
     const loggedMessages = [] as LoggedMessage[];
     await uploadDatabases(
       testRepoName,
       getTestConfig(tmpDir),
       testApiDetails,
       getRecordingLogger(loggedMessages)
     );
     t.assert(
       loggedMessages.find(
         (v) =>
           v.type === "debug" &&
           v.message === "Successfully uploaded database for javascript"
       ) !== undefined
     );
   });
 });
 
+test("Successfully uploading a database to uploads.github.com", async (t) => {
+  await withTmpDir(async (tmpDir) => {
+    setupActionsVars(tmpDir, tmpDir);
+    sinon
+      .stub(actionsUtil, "getRequiredInput")
+      .withArgs("upload-database")
+      .returns("true");
+    sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
+
+    mockHttpRequests(200, true, 201);
+
+    setCodeQL({
+      async databaseBundle(_: string, outputFilePath: string) {

(file header lost in mirroring)
@@ -33,14 +33,16 @@ export async function uploadDatabases(
   }
 
   const client = getApiClient(apiDetails);
+  let useUploadDomain: boolean;
   try {
-    await client.request(
+    const response = await client.request(
       "GET /repos/:owner/:repo/code-scanning/codeql/databases",
       {
         owner: repositoryNwo.owner,
         repo: repositoryNwo.repo,
       }
     );
+    useUploadDomain = response.data["uploads_domain_enabled"];
   } catch (e) {
     if (util.isHTTPError(e) && e.status === 404) {
       logger.debug(
@@ -55,18 +57,37 @@ export async function uploadDatabases(
 
   const codeql = await getCodeQL(config.codeQLCmd);
   for (const language of config.languages) {
-    // Upload the database bundle
+    // Upload the database bundle.
+    // Although we are uploading arbitrary file contents to the API, it's worth
+    // noting that it's the API's job to validate that the contents is acceptable.
+    // This API method is available to anyone with write access to the repo.
     const payload = fs.readFileSync(await bundleDb(config, language, codeql));
     try {
-      await client.request(
-        `PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language`,
-        {
-          owner: repositoryNwo.owner,
-          repo: repositoryNwo.repo,
-          language,
-          data: payload,
-        }
-      );
+      if (useUploadDomain) {
+        await client.request(
+          `POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name`,
+          {
+            owner: repositoryNwo.owner,
+            repo: repositoryNwo.repo,
+            language,
+            name: `${language}-database`,
+            data: payload,
+            headers: {
+              authorization: `token ${apiDetails.auth}`,
+            },
+          }
+        );
+      } else {
+        await client.request(
+          `PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language`,
+          {
+            owner: repositoryNwo.owner,
+            repo: repositoryNwo.repo,
+            language,
+            data: payload,
+          }
+        );
+      }
       logger.debug(`Successfully uploaded database for ${language}`);
     } catch (e) {
       console.log(e);

(file header lost in mirroring)
@@ -1,3 +1,3 @@
 {
-  "bundleVersion": "codeql-bundle-20211025"
+  "bundleVersion": "codeql-bundle-20211122"
 }

(file header lost in mirroring)
@@ -212,25 +212,25 @@ export async function getCombinedTracerConfig(
       );
     }
     mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
-  }
-
-  // Add a couple more variables
-  mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
-  const codeQLDir = path.dirname(codeql.getPath());
-  if (process.platform === "darwin") {
-    mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(
-      codeQLDir,
-      "tools",
-      "osx64",
-      "libtrace.dylib"
-    );
-  } else if (process.platform !== "win32") {
-    mainTracerConfig.env["LD_PRELOAD"] = path.join(
-      codeQLDir,
-      "tools",
-      "linux64",
-      "${LIB}trace.so"
-    );
+
+    // Add a couple more variables
+    mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
+    const codeQLDir = path.dirname(codeql.getPath());
+    if (process.platform === "darwin") {
+      mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(
+        codeQLDir,
+        "tools",
+        "osx64",
+        "libtrace.dylib"
+      );
+    } else if (process.platform !== "win32") {
+      mainTracerConfig.env["LD_PRELOAD"] = path.join(
+        codeQLDir,
+        "tools",
+        "linux64",
+        "${LIB}trace.so"
+      );
+    }
   }
 
 // On macos it's necessary to prefix the build command with the runner executable

@@ -175,3 +175,24 @@ test("populateRunAutomationDetails", (t) => {
|
||||
);
|
||||
t.deepEqual(modifiedSarif, expectedSarif);
|
||||
});
|
||||
|
||||
test("validateUniqueCategory", (t) => {
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(undefined));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(undefined));
|
||||
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory("abc"));
|
||||
t.throws(() => uploadLib.validateUniqueCategory("abc"));
|
||||
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory("def"));
|
||||
t.throws(() => uploadLib.validateUniqueCategory("def"));
|
||||
|
||||
// Our category sanitization is not perfect. Here are some examples
|
||||
// of where we see false clashes
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory("abc/def"));
|
||||
t.throws(() => uploadLib.validateUniqueCategory("abc@def"));
|
||||
t.throws(() => uploadLib.validateUniqueCategory("abc_def"));
|
||||
t.throws(() => uploadLib.validateUniqueCategory("abc def"));
|
||||
|
||||
// this one is fine
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory("abc_ def"));
|
||||
});
|
||||
|
||||
@@ -110,6 +110,8 @@ async function uploadPayload(
|
||||
|
||||
logger.debug(`response status: ${response.status}`);
|
||||
logger.info("Successfully uploaded results");
|
||||
|
||||
return response.data.id;
|
||||
}
|
||||
|
||||
export interface UploadStatusReport {
|
||||
@@ -121,6 +123,11 @@ export interface UploadStatusReport {
|
||||
num_results_in_sarif?: number;
|
||||
}
|
||||
|
||||
export interface UploadResult {
|
||||
statusReport: UploadStatusReport;
|
||||
sarifID: string;
|
||||
}
|
||||
|
||||
// Recursively walks a directory and returns all SARIF files it finds.
|
||||
// Does not follow symlinks.
|
||||
export function findSarifFilesInDir(sarifPath: string): string[] {
|
||||
@@ -147,7 +154,7 @@ export async function uploadFromActions(
   gitHubVersion: util.GitHubVersion,
   apiDetails: api.GitHubApiDetails,
   logger: Logger
-): Promise<UploadStatusReport> {
+): Promise<UploadResult> {
   return await uploadFiles(
     getSarifFilePaths(sarifPath),
     parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")),

@@ -178,7 +185,7 @@ export async function uploadFromRunner(
   gitHubVersion: util.GitHubVersion,
   apiDetails: api.GitHubApiDetails,
   logger: Logger
-): Promise<UploadStatusReport> {
+): Promise<UploadResult> {
   return await uploadFiles(
     getSarifFilePaths(sarifPath),
     repositoryNwo,

@@ -339,20 +346,11 @@ async function uploadFiles(
   gitHubVersion: util.GitHubVersion,
   apiDetails: api.GitHubApiDetails,
   logger: Logger
-): Promise<UploadStatusReport> {
+): Promise<UploadResult> {
   logger.startGroup("Uploading results");
   logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);

-  if (util.isActions()) {
-    // This check only works on actions as env vars don't persist between calls to the runner
-    const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
-    if (process.env[sentinelEnvVar]) {
-      throw new Error(
-        "Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job"
-      );
-    }
-    core.exportVariable(sentinelEnvVar, sentinelEnvVar);
-  }
+  validateUniqueCategory(category);

   // Validate that the files we were asked to upload are all valid SARIF files
   for (const file of sarifFiles) {
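The removed block relied on the fact that, on Actions, core.exportVariable persists an environment variable to every later step of the same job, so a second analyze/upload-sarif invocation sees the sentinel and aborts. A stripped-down sketch of that mechanism, for reference (the real, per-category logic now lives in validateUniqueCategory, shown further down):

```ts
import * as core from "@actions/core";

const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
  throw new Error("Aborting upload: a SARIF upload already ran in this job");
}
// Visible to this process and exported to all subsequent steps in the job.
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
```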
@@ -399,13 +397,114 @@ async function uploadFiles(
   logger.debug(`Number of results in upload: ${numResultInSarif}`);

   // Make the upload
-  await uploadPayload(payload, repositoryNwo, apiDetails, logger);
+  const sarifID = await uploadPayload(
+    payload,
+    repositoryNwo,
+    apiDetails,
+    logger
+  );

   logger.endGroup();

   return {
-    raw_upload_size_bytes: rawUploadSizeBytes,
-    zipped_upload_size_bytes: zippedUploadSizeBytes,
-    num_results_in_sarif: numResultInSarif,
+    statusReport: {
+      raw_upload_size_bytes: rawUploadSizeBytes,
+      zipped_upload_size_bytes: zippedUploadSizeBytes,
+      num_results_in_sarif: numResultInSarif,
+    },
+    sarifID,
   };
 }

+const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
+const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
+
+// Waits until either the analysis is successfully processed, a processing error is reported, or STATUS_CHECK_TIMEOUT_MILLISECONDS elapses.
+export async function waitForProcessing(
+  repositoryNwo: RepositoryNwo,
+  sarifID: string,
+  apiDetails: api.GitHubApiDetails,
+  logger: Logger
+): Promise<void> {
+  logger.startGroup("Waiting for processing to finish");
+  const client = api.getApiClient(apiDetails);
+
+  const statusCheckingStarted = Date.now();
+  // eslint-disable-next-line no-constant-condition
+  while (true) {
+    if (
+      Date.now() >
+      statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS
+    ) {
+      // If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
+      // It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
+      logger.warning(
+        "Timed out waiting for analysis to finish processing. Continuing."
+      );
+      break;
+    }
+    try {
+      const response = await client.request(
+        "GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id",
+        {
+          owner: repositoryNwo.owner,
+          repo: repositoryNwo.repo,
+          sarif_id: sarifID,
+        }
+      );
+      const status = response.data.processing_status;
+      logger.info(`Analysis upload status is ${status}.`);
+      if (status === "complete") {
+        break;
+      } else if (status === "failed") {
+        throw new Error(
+          `Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`
+        );
+      }
+    } catch (e) {
+      if (util.isHTTPError(e)) {
+        switch (e.status) {
+          case 404:
+            logger.debug("Analysis is not found yet...");
+            break; // Note this breaks from the case statement, not the outer loop.
+          default:
+            throw e;
+        }
+      } else {
+        throw e;
+      }
+    }
+    await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
+  }
+  logger.endGroup();
+}

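A usage sketch for waitForProcessing, mirroring how the upload-sarif entry point (later in this compare) chains it after an upload; `sarifPath`, `gitHubVersion`, `apiDetails` and `logger` are assumed to be in scope, so this is illustrative rather than runnable in isolation.

```ts
// Upload first, then block until Code Scanning reports "complete" or "failed"
// (or the two-minute timeout above elapses).
const uploadResult = await uploadFromActions(
  sarifPath,
  gitHubVersion,
  apiDetails,
  logger
);
await waitForProcessing(
  parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")),
  uploadResult.sarifID,
  apiDetails,
  logger
);
```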
+export function validateUniqueCategory(category: string | undefined) {
+  if (util.isActions()) {
+    // This check only works on actions as env vars don't persist between calls to the runner
+    const sentinelEnvVar = `CODEQL_UPLOAD_SARIF${
+      category ? `_${sanitize(category)}` : ""
+    }`;
+    if (process.env[sentinelEnvVar]) {
+      throw new Error(
+        "Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per category. " +
+          "Please specify a unique `category` to call this action multiple times. " +
+          `Category: ${category ? category : "(none)"}`
+      );
+    }
+    core.exportVariable(sentinelEnvVar, sentinelEnvVar);
+  }
+}

+/**
+ * Sanitizes a string to be used as an environment variable name.
+ * This will replace all non-alphanumeric characters with underscores.
+ * There could still be some false category clashes if two uploads
+ * occur that differ only in their non-alphanumeric characters. This is
+ * unlikely.
+ *
+ * @param str the initial value to sanitize
+ */
+function sanitize(str: string) {
+  return str.replace(/[^a-zA-Z0-9_]/g, "_");
+}

@@ -2,6 +2,7 @@ import * as core from "@actions/core";

 import * as actionsUtil from "./actions-util";
 import { getActionsLogger } from "./logging";
+import { parseRepositoryNwo } from "./repository";
 import * as upload_lib from "./upload-lib";
 import {
   getGitHubVersion,

@@ -56,13 +57,21 @@ async function run() {

   const gitHubVersion = await getGitHubVersion(apiDetails);

-  const uploadStats = await upload_lib.uploadFromActions(
+  const uploadResult = await upload_lib.uploadFromActions(
     actionsUtil.getRequiredInput("sarif_file"),
     gitHubVersion,
     apiDetails,
     getActionsLogger()
   );
-  await sendSuccessStatusReport(startedAt, uploadStats);
+  if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
+    await upload_lib.waitForProcessing(
+      parseRepositoryNwo(getRequiredEnvParam("GITHUB_REPOSITORY")),
+      uploadResult.sarifID,
+      apiDetails,
+      getActionsLogger()
+    );
+  }
+  await sendSuccessStatusReport(startedAt, uploadResult.statusReport);
 } catch (error) {
   const message = error instanceof Error ? error.message : String(error);
   const stack = error instanceof Error ? error.stack : String(error);

14
src/util.ts
@@ -559,8 +559,18 @@ export async function bundleDb(
     config.dbLocation,
     `${databasePath}.zip`
   );
-  if (!fs.existsSync(databaseBundlePath)) {
-    await codeql.databaseBundle(databasePath, databaseBundlePath);
-  }
+  // For a tiny bit of added safety, delete the file if it exists.
+  // The file is probably from an earlier call to this function, either
+  // as part of this action step or a previous one, but it could also be
+  // from somewhere else or someone trying to make the action upload a
+  // non-database file.
+  if (fs.existsSync(databaseBundlePath)) {
+    fs.rmSync(databaseBundlePath, { recursive: true });
+  }
+  await codeql.databaseBundle(databasePath, databaseBundlePath);
   return databaseBundlePath;
 }

+export async function delay(milliseconds: number) {
+  return new Promise((resolve) => setTimeout(resolve, milliseconds));
+}

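A hedged usage sketch for bundleDb: its full parameter list is not visible in this hunk, so the signature assumed below is an illustration only, meant to show the bundle-per-language flow that feeds the database upload earlier in this compare.

```ts
// Assumed signature: bundleDb(config, language, codeql, dbName) -> bundle path.
// config, codeql and logger are taken to be in scope, as elsewhere in the diff.
for (const language of config.languages) {
  const databaseBundlePath = await bundleDb(config, language, codeql, language);
  logger.debug(`Created database bundle at ${databaseBundlePath}`);
}
```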
@@ -6,7 +6,7 @@ inputs:
     description: |
       The SARIF file or directory of SARIF files to be uploaded to GitHub code scanning.
       See https://docs.github.com/en/code-security/code-scanning/integrating-with-code-scanning/uploading-a-sarif-file-to-github#uploading-a-code-scanning-analysis-with-github-actions
-      for information on the maximum number of results and maximum file size supported by code scanning.
+      for information on the maximum number of results and maximum file size supported by code scanning.
     required: false
     default: '../results'
   checkout_path:

@@ -20,6 +20,10 @@ inputs:
   category:
     description: String used by Code Scanning for matching the analyses
     required: false
+  wait-for-processing:
+    description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
+    required: true
+    default: "false"
 runs:
   using: 'node12'
   main: '../lib/upload-sarif-action.js'