Mirror of https://github.com/github/codeql-action.git, synced 2025-12-06 15:58:06 +08:00
Compare commits
73 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 384cfc42b2 | |
| | 5a1e31dc6a | |
| | 67c0353a8c | |
| | 7ec25e02e3 | |
| | 713eacdf6c | |
| | c0b507e521 | |
| | d563b098d7 | |
| | fca047627b | |
| | 28fe8e7028 | |
| | 8a4b243fbf | |
| | 19970ae6b5 | |
| | ec1b16574e | |
| | b31df3ff95 | |
| | 776db51d2e | |
| | b886234637 | |
| | 9913c9bfa5 | |
| | 8de62beb50 | |
| | b6fbccaba1 | |
| | df0c306daf | |
| | ab1f709732 | |
| | 8454e21c9c | |
| | d85c3e58ec | |
| | cbabe47a0b | |
| | f8a48f464d | |
| | f6f23f8671 | |
| | c2a7379048 | |
| | cd783c8a29 | |
| | 300c8b6dcb | |
| | faa9ba7363 | |
| | d2a0fc83dc | |
| | 71112ab35d | |
| | e677af3fd0 | |
| | 848e5140d4 | |
| | e7fe6da378 | |
| | 2159631658 | |
| | 9de1702400 | |
| | efded22908 | |
| | 5602bd50bf | |
| | 2f4be8e34b | |
| | 9763bdd6ec | |
| | 00d4d60204 | |
| | e5d84de18b | |
| | ea1acc573a | |
| | 79ea6d6a7c | |
| | 3e50d096f8 | |
| | cca1cfdacf | |
| | cdea582765 | |
| | 3e59dee9e2 | |
| | 249c7ffce1 | |
| | 254816c2d2 | |
| | 6d62c245ec | |
| | 5e87034b3b | |
| | 621e0794ac | |
| | d6499fad61 | |
| | 04671efa1d | |
| | e1f05902cd | |
| | f9e96fa857 | |
| | 14a5537e13 | |
| | d3eb4974a3 | |
| | 39216d10d3 | |
| | 265a7db16a | |
| | f623d4cec3 | |
| | eacec3646a | |
| | e0e2abc1a5 | |
| | 716b5980cd | |
| | 1d83f2a0bc | |
| | ce77f88627 | |
| | a777b51ef7 | |
| | 88fbabe21d | |
| | eeb215b041 | |
| | b7b7607959 | |
| | 7bcc6564d4 | |
| | b661ef1697 | |
.github/prepare-test/action.yml (vendored, 1 changed line)

@@ -28,7 +28,6 @@ runs:
echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$VERSION-manual/codeql-bundle.tar.gz"
elif [[ ${{ inputs.version }} == *"stable"* ]]; then
export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
echo "Hello $VERSION"
echo "::set-output name=tools-url::https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz"
elif [[ ${{ inputs.version }} == "latest" ]]; then
echo "::set-output name=tools-url::latest"
.github/workflows/__debug-artifacts.yml (generated, vendored, 8 changed lines)

@@ -41,12 +41,12 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
debug: true
debug-artifact-name: my-debug-artifacts
debug-database-name: my-db
- name: Build code
shell: bash
run: ./build.sh
@@ -54,7 +54,7 @@ jobs:
id: analysis
- uses: actions/download-artifact@v2
with:
name: debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
name: my-debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
- shell: bash
run: |
LANGUAGES="cpp csharp go java javascript python"
@@ -64,7 +64,7 @@ jobs:
echo "Missing a SARIF file for $language"
exit 1
fi
if [[ ! -f "$language.zip" ]] ; then
if [[ ! -f "my-db-$language.zip" ]] ; then
echo "Missing a database bundle for $language"
exit 1
fi
.github/workflows/__go-custom-queries.yml (generated, vendored, 2 changed lines)

@@ -44,8 +44,6 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- uses: actions/setup-go@v2
with:
go-version: ^1.13.1
.github/workflows/__multi-language-autodetect.yml (generated, vendored, 2 changed lines)

@@ -41,8 +41,6 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- uses: ./../action/init
with:
db-location: ${{ runner.temp }}/customDbLocation
.github/workflows/__packaging-config-inputs-js.yml (generated, vendored, 2 changed lines)

@@ -35,8 +35,6 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging3.yml
.github/workflows/__packaging-config-js.yml (generated, vendored, 2 changed lines)

@@ -35,8 +35,6 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging.yml
.github/workflows/__packaging-inputs-js.yml (generated, vendored, 2 changed lines)

@@ -35,8 +35,6 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging2.yml
.github/workflows/__remote-config.yml (generated, vendored, 2 changed lines)

@@ -44,8 +44,6 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
.github/workflows/__split-workflow.yml (generated, vendored, 2 changed lines)

@@ -35,8 +35,6 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging3.yml
.github/workflows/__test-local-codeql.yml (generated, vendored, 2 changed lines)

@@ -35,8 +35,6 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- name: Fetch a CodeQL bundle
shell: bash
env:
.github/workflows/__unset-environment.yml (generated, vendored, 2 changed lines)

@@ -41,8 +41,6 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- name: Initialize dotnet
run: dotnet restore
- uses: ./../action/init
with:
db-location: ${{ runner.temp }}/customDbLocation
.github/workflows/pr-checks.yml (vendored, 9 changed lines)

@@ -164,9 +164,6 @@ jobs:
npm install
npm run build-runner

- name: Initialize dotnet
run: dotnet restore

- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
@@ -204,9 +201,6 @@ jobs:
npm install
npm run build-runner

- name: Initialize dotnet
run: dotnet restore

- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
@@ -252,9 +246,6 @@ jobs:
npm install
npm run build-runner

- name: Initialize dotnet
run: dotnet restore

- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
.github/workflows/update-release-branch.yml (vendored, 2 changed lines)

@@ -1,7 +1,5 @@
name: Update release branch
on:
schedule:
- cron: 0 9 * * 1
repository_dispatch:
# Example of how to trigger this:
# curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'
CHANGELOG.md (13 changed lines)

@@ -1,5 +1,18 @@
# CodeQL Action and CodeQL Runner Changelog

## 1.0.29 - 21 Jan 2022

- The feature to wait for SARIF processing to complete after upload has been disabled by default due to a bug in its interaction with pull requests from forks.

## 1.0.28 - 18 Jan 2022

- Update default CodeQL bundle version to 2.7.5. [#866](https://github.com/github/codeql-action/pull/866)
- Fix a bug where SARIF files were failing upload due to an invalid test for unique categories. [#872](https://github.com/github/codeql-action/pull/872)

## 1.0.27 - 11 Jan 2022

- The `analyze` and `upload-sarif` actions will now wait up to 2 minutes for processing to complete after they have uploaded the results so they can report any processing errors that occurred. This behavior can be disabled by setting the `wait-for-processing` action input to `"false"`.

## 1.0.26 - 10 Dec 2021

- Update default CodeQL bundle version to 2.7.3. [#842](https://github.com/github/codeql-action/pull/842)
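The 1.0.27 and 1.0.29 entries above describe the `wait-for-processing` input on the `analyze` and `upload-sarif` actions. As a minimal sketch (the step layout and the `@v1` reference are assumptions, not part of this diff), a workflow can set the input explicitly:

```yaml
# Hypothetical analyze step; only the wait-for-processing input comes from the changelog above.
- name: Perform CodeQL Analysis
  uses: github/codeql-action/analyze@v1
  with:
    # "true" waits up to 2 minutes for SARIF processing and reports any errors;
    # "false" (the 1.0.29 default) skips the wait.
    wait-for-processing: "true"
```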
@@ -59,6 +59,16 @@ inputs:
description: Enable debugging mode. This will result in more output being produced which may be useful when debugging certain issues.
required: false
default: 'false'
debug-artifact-name:
description: >-
The name of the artifact to store debugging information in.
This is only used when debug mode is enabled.
required: false
debug-database-name:
description: >-
The name of the database uploaded to the debugging artifact.
This is only used when debug mode is enabled.
required: false
outputs:
codeql-path:
description: The path of the CodeQL binary used for analysis
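The hunk above adds the `debug-artifact-name` and `debug-database-name` inputs. The `__debug-artifacts.yml` workflow earlier in this diff passes exactly these values; a minimal sketch of the same step for a consumer workflow (the `@v1` reference is an assumption, the test workflow uses the local `./../action/init` path instead):

```yaml
# Sketch only: mirrors the inputs exercised by __debug-artifacts.yml above.
- uses: github/codeql-action/init@v1
  with:
    debug: true
    debug-artifact-name: my-debug-artifacts   # name of the uploaded debug artifact
    debug-database-name: my-db                # name used for the uploaded database bundles
```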
lib/analysis-paths.js (generated, 4 changed lines)

@@ -37,11 +37,11 @@ function buildIncludeExcludeEnvVar(paths) {
return paths.join("\n");
}
function printPathFiltersWarning(config, logger) {
// Index include/exclude/filters only work in javascript and python.
// Index include/exclude/filters only work in javascript/python/ruby.
// If any other languages are detected/configured then show a warning.
if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) &&
!config.languages.every(isInterpretedLanguage)) {
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for JavaScript and Python');
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for JavaScript, Python, and Ruby');
}
}
exports.printPathFiltersWarning = printPathFiltersWarning;
@@ -1 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,oEAAoE;IACpE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,4FAA4F,CAC7F,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"}
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,CACL,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,IAAI,QAAQ,KAAK,MAAM,CAC1E,CAAC;AACJ,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,qEAAqE;IACrE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,mGAAmG,CACpG,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"}
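The `printPathFiltersWarning` hunk above fires when `paths` or `paths-ignore` is set in the config and a non-interpreted language is being analyzed. A minimal sketch of a config file that reaches that branch; the file name and values are assumptions, only the field names come from the diff:

```yaml
# Hypothetical .github/codeql/codeql-config.yml
name: "Example config"
paths:
  - src          # honored for JavaScript, Python, and Ruby
paths-ignore:
  - src/vendor   # if a compiled language such as Java is also configured, the warning above is logged
```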
lib/analysis-paths.test.js (generated, 6 changed lines)

@@ -43,6 +43,8 @@ const util = __importStar(require("./util"));
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -65,6 +67,8 @@ const util = __importStar(require("./util"));
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
@@ -88,6 +92,8 @@ const util = __importStar(require("./util"));
dbLocation: path.resolve(tempDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;SACjB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;SACjB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;SACjB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
lib/analyze-action-env.test.js (generated, 2 changed lines)

@@ -45,6 +45,7 @@ const util = __importStar(require("./util"));
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
sinon.stub(configUtils, "getConfig").resolves({
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
languages: [],
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
@@ -53,6 +54,7 @@ const util = __importStar(require("./util"));
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
// When there are no action inputs for RAM and threads, the action uses
// environment variables (passed down from the init action) to set RAM and
// threads usage.
@@ -1 +1 @@
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAA+D;AAC/D,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,SAAS,EAAE,EAAE;SACmB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YAClD,SAAS,EAAE,EAAE;SACmB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
lib/analyze-action-input.test.js (generated, 2 changed lines)

@@ -45,6 +45,7 @@ const util = __importStar(require("./util"));
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
sinon.stub(configUtils, "getConfig").resolves({
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
languages: [],
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
@@ -53,6 +54,7 @@ const util = __importStar(require("./util"));
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
process.env["CODEQL_THREADS"] = "1";
process.env["CODEQL_RAM"] = "4992";
// Action inputs have precedence over environment variables.
@@ -1 +1 @@
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAA+D;AAC/D,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,SAAS,EAAE,EAAE;SACmB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YAClD,SAAS,EAAE,EAAE;SACmB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
lib/analyze-action.js (generated, 42 changed lines)

@@ -29,6 +29,7 @@ const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const database_upload_1 = require("./database-upload");
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
@@ -71,12 +72,19 @@ async function run() {
const outputDir = actionsUtil.getRequiredInput("output");
const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger);
const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"]);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
const featureFlags = new feature_flags_1.GitHubFeatureFlags(config.gitHubVersion, apiDetails, repositoryNwo, logger);
// We currently perform an API request in both the `init` and `analyze` Actions to determine
// what feature flags are enabled. At the time of writing, this redundant API call is acceptable
// to us, but if we wanted to avoid it, we could do so by serializing the feature flags as part
// of the config file.
void featureFlags.preloadFeatureFlags();
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
if (config.debugMode) {
// Upload the SARIF files as an Actions artifact for debugging
await uploadDebugArtifacts(config.languages.map((lang) => path.resolve(outputDir, `${lang}.sarif`)), outputDir);
await uploadDebugArtifacts(config.languages.map((lang) => path.resolve(outputDir, `${lang}.sarif`)), outputDir, config.debugArtifactName);
}
}
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
@@ -90,10 +98,10 @@ async function run() {
// Multilanguage tracing: there are additional logs in the root of the cluster
toUpload.push(...listFolder(path.resolve(config.dbLocation, "log")));
}
await uploadDebugArtifacts(toUpload, config.dbLocation);
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
if (!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
await uploadDebugArtifacts([path.resolve(config.tempDir, "compound-build-tracer.log")], config.tempDir);
await uploadDebugArtifacts([path.resolve(config.tempDir, "compound-build-tracer.log")], config.tempDir, config.debugArtifactName);
}
}
if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
@@ -110,19 +118,12 @@ async function run() {
else {
logger.info("Not uploading results");
}
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger); // Possibly upload the database bundles for remote queries
// Possibly upload the database bundles for remote queries
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, featureFlags, apiDetails, logger);
if (uploadResult !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
}
if (config.debugMode) {
// Upload the database bundles as an Actions artifact for debugging
const toUpload = [];
for (const language of config.languages)
toUpload.push(await (0, util_1.bundleDb)(config, language, codeql));
await uploadDebugArtifacts(toUpload, config.dbLocation);
}
}
catch (origError) {
const error = origError instanceof Error ? origError : new Error(String(origError));
@@ -138,6 +139,19 @@ async function run() {
return;
}
finally {
if (config !== undefined && config.debugMode) {
try {
// Upload the database bundles as an Actions artifact for debugging
const toUpload = [];
for (const language of config.languages) {
toUpload.push(await (0, util_1.bundleDb)(config, language, await (0, codeql_1.getCodeQL)(config.codeQLCmd), `${config.debugDatabaseName}-${language}`));
}
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
}
catch (error) {
console.log(`Failed to upload database debug bundles: ${error}`);
}
}
if (core.isDebug() && config !== undefined) {
core.info("Debug mode is on. Printing CodeQL debug logs...");
for (const language of config.languages) {
@@ -173,14 +187,14 @@ async function run() {
await sendStatusReport(startedAt, undefined);
}
}
async function uploadDebugArtifacts(toUpload, rootDir) {
async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
let suffix = "";
const matrix = actionsUtil.getRequiredInput("matrix");
if (matrix !== undefined && matrix !== "null") {
for (const entry of Object.entries(JSON.parse(matrix)).sort())
suffix += `-${entry[1]}`;
}
await artifact.create().uploadArtifact(actionsUtil.sanitizeArifactName(`${util_1.DEBUG_ARTIFACT_NAME}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
await artifact.create().uploadArtifact(actionsUtil.sanitizeArifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
}
function listFolder(dir) {
const entries = fs.readdirSync(dir, { withFileTypes: true });
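In `uploadDebugArtifacts` above, the artifact name is `artifactName` plus a suffix built from the sorted entries of the `matrix` input. A hedged sketch of how that lines up with the debug-artifacts test workflow in this diff (the concrete matrix values are assumptions):

```yaml
# Hypothetical matrix for the __debug-artifacts.yml job shown earlier:
strategy:
  matrix:
    os: [ubuntu-latest]
    version: [latest]
# With debug-artifact-name: my-debug-artifacts, the uploaded artifact would be named
# "my-debug-artifacts-ubuntu-latest-latest" (values appended in sorted key order: os, version),
# matching the download step's "my-debug-artifacts-${{ matrix.os }}-${{ matrix.version }}".
```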
File diff suppressed because one or more lines are too long
lib/analyze.test.js (generated, 2 changed lines)

@@ -126,6 +126,8 @@ const util = __importStar(require("./util"));
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs,
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
recursive: true,
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
{ "maximumVersion": "3.3", "minimumVersion": "3.0" }
{ "maximumVersion": "3.4", "minimumVersion": "3.0" }
lib/codeql.js (generated, 6 changed lines)

@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
@@ -74,6 +74,7 @@ const CODEQL_VERSION_GROUP_RULES = "2.5.5";
const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
exports.CODEQL_VERSION_COUNTS_LINES = "2.6.2";
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
/**
* This variable controls using the new style of tracing from the CodeQL
* CLI. In particular, with versions above this we will use both indirect
@@ -637,12 +638,13 @@ async function getCodeQLForCmd(cmd, checkVersion) {
];
await runTool(cmd, codeqlArgs);
},
async databaseBundle(databasePath, outputFilePath) {
async databaseBundle(databasePath, outputFilePath, databaseName) {
const args = [
"database",
"bundle",
databasePath,
`--output=${outputFilePath}`,
`--name=${databaseName}`,
];
await new toolrunner.ToolRunner(cmd, args).exec();
},
File diff suppressed because one or more lines are too long
lib/config-utils.js (generated, 54 changed lines)

@@ -25,8 +25,11 @@ const path = __importStar(require("path"));
const yaml = __importStar(require("js-yaml"));
const semver = __importStar(require("semver"));
const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const externalQueries = __importStar(require("./external-queries"));
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const util_1 = require("./util");
// Property names from the user-supplied config file.
const NAME_PROPERTY = "name";
const DISABLE_DEFAULT_QUERIES_PROPERTY = "disable-default-queries";
@@ -116,11 +119,26 @@ const builtinSuites = ["security-extended", "security-and-quality"];
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
* Throws an error if suiteName is not a valid builtin suite.
*/
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, suiteName, configFile) {
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureFlags, configFile) {
const found = builtinSuites.find((suite) => suite === suiteName);
if (!found) {
throw new Error(getQueryUsesInvalid(configFile, suiteName));
}
// If we're running the JavaScript security-extended analysis (or a superset of it) and the repo
// is opted into the ML-powered queries beta, then add the ML-powered query pack so that we run
// the ML-powered queries.
if (languages.includes("javascript") &&
(found === "security-extended" || found === "security-and-quality") &&
(await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled)) &&
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES))) {
if (!packs.javascript) {
packs.javascript = [];
}
packs.javascript.push({
packName: "codeql/javascript-experimental-atm-queries",
version: "~0.0.2",
});
}
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined);
}
@@ -180,7 +198,7 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
* local paths starting with './', or references to remote repos, or
* a finite set of hardcoded terms for builtin suites.
*/
async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, workspacePath, apiDetails, logger, configFile) {
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, featureFlags, logger, configFile) {
queryUses = queryUses.trim();
if (queryUses === "") {
throw new Error(getQueryUsesInvalid(configFile));
@@ -192,7 +210,7 @@ async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir,
}
// Check for one of the builtin suites
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
await addBuiltinSuiteQueries(languages, codeQL, resultMap, queryUses, configFile);
await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureFlags, configFile);
return;
}
// Otherwise, must be a reference to another repo
@@ -404,12 +422,12 @@ async function getLanguages(codeQL, languagesInput, repository, apiDetails, logg
}
return parsedLanguages;
}
async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, workspacePath, apiDetails, logger) {
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureFlags, logger) {
queriesInput = queriesInput.trim();
// "+" means "don't override config file" - see shouldAddConfigFileQueries
queriesInput = queriesInput.replace(/^\+/, "");
for (const query of queriesInput.split(",")) {
await parseQueryUses(languages, codeQL, resultMap, query, tempDir, workspacePath, apiDetails, logger);
await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureFlags, logger);
}
}
// Returns true if either no queries were provided in the workflow.
@@ -425,7 +443,7 @@ function shouldAddConfigFileQueries(queriesInput) {
/**
* Get the default config for when the user has not supplied one.
*/
async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
var _a;
const languages = await getLanguages(codeQL, languagesInput, repository, apiDetails, logger);
const queries = {};
@@ -436,10 +454,10 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
};
}
await addDefaultQueries(codeQL, languages, queries);
if (queriesInput) {
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, workspacePath, apiDetails, logger);
}
const packs = (_a = parsePacksFromInput(packsInput, languages)) !== null && _a !== void 0 ? _a : {};
if (queriesInput) {
await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
}
return {
languages,
queries,
@@ -453,13 +471,15 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
gitHubVersion,
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
debugMode,
debugArtifactName,
debugDatabaseName,
};
}
exports.getDefaultConfig = getDefaultConfig;
/**
* Load the config from the given file.
*/
async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
var _a;
let parsedYAML;
if (isLocal(configFile)) {
@@ -500,12 +520,13 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
if (!disableDefaultQueries) {
await addDefaultQueries(codeQL, languages, queries);
}
const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile);
// If queries were provided using `with` in the action configuration,
// they should take precedence over the queries in the config file
// unless they're prefixed with "+", in which case they supplement those
// in the config file.
if (queriesInput) {
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, workspacePath, apiDetails, logger);
await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
}
if (shouldAddConfigFileQueries(queriesInput) &&
QUERIES_PROPERTY in parsedYAML) {
@@ -518,7 +539,7 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
typeof query[QUERIES_USES_PROPERTY] !== "string") {
throw new Error(getQueryUsesInvalid(configFile));
}
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, logger, configFile);
await parseQueryUses(languages, codeQL, queries, packs, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, featureFlags, logger, configFile);
}
}
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
@@ -543,7 +564,6 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
paths.push(validateAndSanitisePath(includePath, PATHS_PROPERTY, configFile, logger));
}
}
const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile);
return {
languages,
queries,
@@ -557,6 +577,8 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
gitHubVersion,
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
debugMode,
debugArtifactName,
debugDatabaseName,
};
}
/**
@@ -682,16 +704,16 @@ function dbLocationOrDefault(dbLocation, tempDir) {
* This will parse the config from the user input if present, or generate
* a default config. The parsed config is then stored to a known location.
*/
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
var _a, _b, _c;
let config;
// If no config file was provided create an empty one
if (!configFile) {
logger.debug("No configuration file was provided");
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
}
else {
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
}
// The list of queries should not be empty for any language. If it is then
// it is a user configuration error.
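The `addBuiltinSuiteQueries` change above adds the `codeql/javascript-experimental-atm-queries` pack (version `~0.0.2`) only when JavaScript is analyzed with the `security-extended` or `security-and-quality` suite, the `MlPoweredQueriesEnabled` flag is on, and the CLI is at least 2.7.5. A sketch of inputs that would reach that branch; the step layout and `@v1` reference are assumptions, the suite and language names come from the diff:

```yaml
# Hypothetical init step selecting a built-in suite for JavaScript.
- uses: github/codeql-action/init@v1
  with:
    languages: javascript
    queries: security-extended   # or security-and-quality
# When the feature flag and CLI version checks pass, packs.javascript gains
# codeql/javascript-experimental-atm-queries at ~0.0.2.
```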
File diff suppressed because one or more lines are too long
95
lib/config-utils.test.js
generated
95
lib/config-utils.test.js
generated
@@ -31,6 +31,7 @@ const sinon = __importStar(require("sinon"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const codeql_1 = require("./codeql");
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
@@ -88,8 +89,8 @@ function mockListLanguages(languages) {
|
||||
};
|
||||
},
|
||||
});
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
|
||||
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger));
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
|
||||
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger));
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("loading config saves config", async (t) => {
|
||||
@@ -111,7 +112,7 @@ function mockListLanguages(languages) {
|
||||
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
|
||||
// Sanity check that getConfig returns undefined before we have called initConfig
|
||||
t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined);
|
||||
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
|
||||
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
|
||||
// The saved config file should now exist
|
||||
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
|
||||
// And that same newly-initialised config should now be returned by getConfig
|
||||
@@ -122,7 +123,7 @@ function mockListLanguages(languages) {
|
||||
(0, ava_1.default)("load input outside of workspace", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -135,7 +136,7 @@ function mockListLanguages(languages) {
|
||||
// no filename given, just a repo
|
||||
const configFile = "octo-org/codeql-config@main";
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -149,7 +150,7 @@ function mockListLanguages(languages) {
|
||||
const configFile = "input";
|
||||
t.false(fs.existsSync(path.join(tmpDir, configFile)));
|
||||
try {
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -215,10 +216,12 @@ function mockListLanguages(languages) {
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: "my-artifact",
debugDatabaseName: "my-db",
};
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "my-artifact", "my-db", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
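The expectedConfig literal in this test shows the two fields that the parsed configuration object now carries. A trimmed illustration of just those fields; the real Config interface in src/config-utils.ts has many more members than shown here:

// Sketch only: field names taken from the expectedConfig literal above.
interface DebugConfigFields {
  debugMode: boolean;
  debugArtifactName: string; // e.g. "my-artifact"; empty inputs fall back to a default name
  debugDatabaseName: string; // e.g. "my-db"
}

const example: DebugConfigFields = {
  debugMode: false,
  debugArtifactName: "my-artifact",
  debugDatabaseName: "my-db",
};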
@@ -254,7 +257,7 @@ function mockListLanguages(languages) {
|
||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
||||
const languages = "javascript";
|
||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolve queries was called correctly
|
||||
t.deepEqual(resolveQueriesArgs.length, 1);
|
||||
t.deepEqual(resolveQueriesArgs[0].queries, [
|
||||
@@ -297,7 +300,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for the default queries
|
||||
// and once for `./foo` from the config file.
|
||||
@@ -330,7 +333,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for the default queries and once for `./override`,
|
||||
// but won't be called for './foo' from the config file.
|
||||
@@ -362,7 +365,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for `./workflow-query`,
|
||||
// but won't be called for the default one since that was disabled
|
||||
@@ -388,7 +391,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly:
|
||||
// It'll be called once for the default queries,
|
||||
// and then once for each of the two queries from the workflow
|
||||
@@ -427,7 +430,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for the default queries,
|
||||
// once for each of additional1 and additional2,
|
||||
@@ -466,7 +469,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
try {
|
||||
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.fail("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -509,7 +512,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
|
||||
const configFile = "octo-org/codeql-config/config.yaml@main";
|
||||
const languages = "javascript";
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.assert(spyGetContents.called);
|
||||
});
|
||||
});
|
||||
@@ -519,7 +522,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
mockGetContents(dummyResponse);
|
||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -535,7 +538,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
mockGetContents(dummyResponse);
|
||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -552,7 +555,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -564,7 +567,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
const languages = "rubbish,english";
|
||||
try {
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -592,7 +595,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
const configFile = path.join(tmpDir, "codeql-config.yaml");
|
||||
fs.writeFileSync(configFile, inputFileContents);
|
||||
const languages = "javascript";
|
||||
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.deepEqual(packs, {
|
||||
[languages_1.Language.javascript]: [
|
||||
{
|
||||
@@ -631,7 +634,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
fs.writeFileSync(configFile, inputFileContents);
|
||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
||||
const languages = "javascript,python,cpp";
|
||||
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.deepEqual(packs, {
|
||||
[languages_1.Language.javascript]: [
|
||||
{
|
||||
@@ -684,7 +687,7 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
|
||||
const inputFile = path.join(tmpDir, configFile);
|
||||
fs.writeFileSync(inputFile, inputFileContents, "utf8");
|
||||
try {
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -867,6 +870,50 @@ parseInputAndConfigErrorMacro.title = (providedTitle) => `Parse Packs input and
|
||||
(0, ava_1.default)("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], /multi-language analysis/);
|
||||
(0, ava_1.default)("input with + only", parseInputAndConfigErrorMacro, {}, " + ", [languages_1.Language.cpp], /remove the '\+'/);
|
||||
(0, ava_1.default)("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], /"xxx" is not a valid pack/);
|
||||
// errors
|
||||
// input w invalid pack name
|
||||
async function mlPoweredQueriesMacro(t, codeQLVersion, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries) {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
async getVersion() {
|
||||
return codeQLVersion;
|
||||
},
|
||||
async resolveQueries() {
|
||||
return {
|
||||
byLanguage: {
|
||||
javascript: { "fake-query.ql": {} },
|
||||
},
|
||||
noDeclaredLanguage: {},
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
});
|
||||
const { packs } = await configUtils.initConfig("javascript", queriesInput, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)(isMlPoweredQueriesFlagEnabled
|
||||
? [feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled]
|
||||
: []), (0, logging_1.getRunnerLogger)(true));
|
||||
if (shouldRunMlPoweredQueries) {
|
||||
t.deepEqual(packs, {
|
||||
[languages_1.Language.javascript]: [
|
||||
{
|
||||
packName: "codeql/javascript-experimental-atm-queries",
|
||||
version: "~0.0.2",
|
||||
},
|
||||
],
|
||||
});
|
||||
}
|
||||
else {
|
||||
t.deepEqual(packs, {});
|
||||
}
|
||||
});
|
||||
}
|
||||
mlPoweredQueriesMacro.title = (_providedTitle, codeQLVersion, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries) => {
const queriesInputDescription = queriesInput
? `'queries: ${queriesInput}'`
: "default config";
return `ML-powered queries ${shouldRunMlPoweredQueries ? "are" : "aren't"} loaded for ${queriesInputDescription} using CLI v${codeQLVersion} when feature flag is ${isMlPoweredQueriesFlagEnabled ? "enabled" : "disabled"}`;
};
// macro, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, "security-extended", false);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, "security-extended", false);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, false);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "security-extended", true);
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "security-and-quality", true);
//# sourceMappingURL=config-utils.test.js.map
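The five test registrations above pin down when the codeql/javascript-experimental-atm-queries pack (version ~0.0.2) is added: CLI 2.7.5 or newer, the ml_powered_queries_enabled flag on, and a security-extended or security-and-quality queries input. A condition reconstructed from those registrations only, not copied from src/config-utils.ts, so the real implementation may differ in details:

function versionAtLeast(version: string, minimum: string): boolean {
  // Tiny numeric comparison of dotted versions; good enough for this illustration.
  const a = version.split(".").map(Number);
  const b = minimum.split(".").map(Number);
  for (let i = 0; i < Math.max(a.length, b.length); i++) {
    const x = a[i] ?? 0;
    const y = b[i] ?? 0;
    if (x !== y) return x > y;
  }
  return true;
}

function shouldAddMlPoweredQueriesPack(
  cliVersion: string,                  // e.g. "2.7.5"
  mlPoweredQueriesFlagEnabled: boolean,
  queriesInput: string | undefined     // e.g. "security-extended"
): boolean {
  const optedIntoSuite =
    queriesInput === "security-extended" || queriesInput === "security-and-quality";
  return versionAtLeast(cliVersion, "2.7.5") && mlPoweredQueriesFlagEnabled && optedIntoSuite;
}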
File diff suppressed because one or more lines are too long
lib/database-upload.js (generated, 24 changed lines)
@@ -24,9 +24,10 @@ const fs = __importStar(require("fs"));
const actionsUtil = __importStar(require("./actions-util"));
const api_client_1 = require("./api-client");
const codeql_1 = require("./codeql");
const feature_flags_1 = require("./feature-flags");
const util = __importStar(require("./util"));
const util_1 = require("./util");
async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
async function uploadDatabases(repositoryNwo, config, featureFlags, apiDetails, logger) {
if (actionsUtil.getRequiredInput("upload-database") !== "true") {
logger.debug("Database upload disabled in workflow. Skipping upload.");
return;
@@ -41,32 +42,19 @@ async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
logger.debug("Not analyzing default branch. Skipping upload.");
return;
}
const client = (0, api_client_1.getApiClient)(apiDetails);
let useUploadDomain;
try {
const response = await client.request("GET /repos/:owner/:repo/code-scanning/codeql/databases", {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
});
useUploadDomain = response.data["uploads_domain_enabled"];
}
catch (e) {
if (util.isHTTPError(e) && e.status === 404) {
if (!(await featureFlags.getValue(feature_flags_1.FeatureFlag.DatabaseUploadsEnabled))) {
logger.debug("Repository is not opted in to database uploads. Skipping upload.");
}
else {
console.log(e);
logger.info(`Skipping database upload due to unknown error: ${e}`);
}
return;
}
const client = (0, api_client_1.getApiClient)(apiDetails);
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
const useUploadDomain = await featureFlags.getValue(feature_flags_1.FeatureFlag.UploadsDomainEnabled);
for (const language of config.languages) {
// Upload the database bundle.
// Although we are uploading arbitrary file contents to the API, it's worth
// noting that it's the API's job to validate that the contents is acceptable.
// This API method is available to anyone with write access to the repo.
const payload = fs.readFileSync(await (0, util_1.bundleDb)(config, language, codeql));
const payload = fs.readFileSync(await (0, util_1.bundleDb)(config, language, codeql, language));
try {
if (useUploadDomain) {
await client.request(`POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name`, {
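The hunk above replaces the old opt-in probe (a GET to the databases endpoint, treating 404 as "not opted in") with two feature-flag checks. A paraphrased TypeScript sketch of that control flow, with types simplified; the helper names shouldUpload and uploadUrlFor are illustrative and do not exist in the repository:

import { FeatureFlag } from "./feature-flags";

interface FeatureFlags {
  getValue(flag: FeatureFlag): Promise<boolean>;
}

async function shouldUpload(featureFlags: FeatureFlags): Promise<boolean> {
  // New behaviour: consult the DatabaseUploadsEnabled flag instead of probing the API.
  return await featureFlags.getValue(FeatureFlag.DatabaseUploadsEnabled);
}

async function uploadUrlFor(featureFlags: FeatureFlags): Promise<string> {
  // UploadsDomainEnabled picks between uploads.github.com and the classic PUT endpoint.
  return (await featureFlags.getValue(FeatureFlag.UploadsDomainEnabled))
    ? "POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name"
    : "PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language";
}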
@@ -1 +1 @@
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAIrC,6CAA+B;AAC/B,iCAAkC;AAE3B,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE;QAC9D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;KACR;IAED,iDAAiD;IACjD,IAAI,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;QAC3D,MAAM,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACjE,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE;QACnD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;KACR;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,UAAU,CAAC,CAAC;IACxC,IAAI,eAAwB,CAAC;IAC7B,IAAI;QACF,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,OAAO,CACnC,wDAAwD,EACxD;YACE,KAAK,EAAE,aAAa,CAAC,KAAK;YAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;SACzB,CACF,CAAC;QACF,eAAe,GAAG,QAAQ,CAAC,IAAI,CAAC,wBAAwB,CAAC,CAAC;KAC3D;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;YAC3C,MAAM,CAAC,KAAK,CACV,kEAAkE,CACnE,CAAC;SACH;aAAM;YACL,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,MAAM,CAAC,IAAI,CAAC,kDAAkD,CAAC,EAAE,CAAC,CAAC;SACpE;QACD,OAAO;KACR;IAED,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,8BAA8B;QAC9B,2EAA2E;QAC3E,8EAA8E;QAC9E,wEAAwE;QACxE,MAAM,OAAO,GAAG,EAAE,CAAC,YAAY,CAAC,MAAM,IAAA,eAAQ,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC,CAAC;QAC1E,IAAI;YACF,IAAI,eAAe,EAAE;gBACnB,MAAM,MAAM,CAAC,OAAO,CAClB,wGAAwG,EACxG;oBACE,KAAK,EAAE,aAAa,CAAC,KAAK;oBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;oBACxB,QAAQ;oBACR,IAAI,EAAE,GAAG,QAAQ,WAAW;oBAC5B,IAAI,EAAE,OAAO;oBACb,OAAO,EAAE;wBACP,aAAa,EAAE,SAAS,UAAU,CAAC,IAAI,EAAE;qBAC1C;iBACF,CACF,CAAC;aACH;iBAAM;gBACL,MAAM,MAAM,CAAC,OAAO,CAClB,kEAAkE,EAClE;oBACE,KAAK,EAAE,aAAa,CAAC,KAAK;oBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;oBACxB,QAAQ;oBACR,IAAI,EAAE,OAAO;iBACd,CACF,CAAC;aACH;YACD,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;SACnE;KACF;AACH,CAAC;AAtFD,0CAsFC"}
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAErC,mDAA4D;AAG5D,6CAA+B;AAC/B,iCAAkC;AAE3B,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,YAA0B,EAC1B,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE;QAC9D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;KACR;IAED,iDAAiD;IACjD,IAAI,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;QAC3D,MAAM,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACjE,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE;QACnD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,2BAAW,CAAC,sBAAsB,CAAC,CAAC,EAAE;QACtE,MAAM,CAAC,KAAK,CACV,kEAAkE,CACnE,CAAC;QACF,OAAO;KACR;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,UAAU,CAAC,CAAC;IACxC,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,MAAM,eAAe,GAAG,MAAM,YAAY,CAAC,QAAQ,CACjD,2BAAW,CAAC,oBAAoB,CACjC,CAAC;IAEF,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,8BAA8B;QAC9B,2EAA2E;QAC3E,8EAA8E;QAC9E,wEAAwE;QACxE,MAAM,OAAO,GAAG,EAAE,CAAC,YAAY,CAC7B,MAAM,IAAA,eAAQ,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CACnD,CAAC;QACF,IAAI;YACF,IAAI,eAAe,EAAE;gBACnB,MAAM,MAAM,CAAC,OAAO,CAClB,wGAAwG,EACxG;oBACE,KAAK,EAAE,aAAa,CAAC,KAAK;oBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;oBACxB,QAAQ;oBACR,IAAI,EAAE,GAAG,QAAQ,WAAW;oBAC5B,IAAI,EAAE,OAAO;oBACb,OAAO,EAAE;wBACP,aAAa,EAAE,SAAS,UAAU,CAAC,IAAI,EAAE;qBAC1C;iBACF,CACF,CAAC;aACH;iBAAM;gBACL,MAAM,MAAM,CAAC,OAAO,CAClB,kEAAkE,EAClE;oBACE,KAAK,EAAE,aAAa,CAAC,KAAK;oBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;oBACxB,QAAQ;oBACR,IAAI,EAAE,OAAO;iBACd,CACF,CAAC;aACH;YACD,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;SACnE;KACF;AACH,CAAC;AA9ED,0CA8EC"}
lib/database-upload.test.js (generated, 99 changed lines)
@@ -30,6 +30,7 @@ const actionsUtil = __importStar(require("./actions-util"));
|
||||
const apiClient = __importStar(require("./api-client"));
|
||||
const codeql_1 = require("./codeql");
|
||||
const database_upload_1 = require("./database-upload");
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util_1 = require("./util");
|
||||
@@ -37,6 +38,10 @@ const util_1 = require("./util");
ava_1.default.beforeEach(() => {
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
});
const uploadToUploadsDomainFlags = (0, feature_flags_1.createFeatureFlags)([
feature_flags_1.FeatureFlag.DatabaseUploadsEnabled,
feature_flags_1.FeatureFlag.UploadsDomainEnabled,
]);
const testRepoName = { owner: "github", repo: "example" };
const testApiDetails = {
auth: "1234",
@@ -56,51 +61,15 @@ function getTestConfig(tmpDir) {
dbLocation: tmpDir,
packs: {},
debugMode: false,
debugArtifactName: util_1.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util_1.DEFAULT_DEBUG_DATABASE_NAME,
};
}
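A sketch of a test fixture assembled from the pieces introduced above: a stubbed feature-flags object plus the new default debug names. DEFAULT_DEBUG_ARTIFACT_NAME and DEFAULT_DEBUG_DATABASE_NAME are exported from ./util according to the compiled code; their exact string values are not visible in this diff:

import { createFeatureFlags, FeatureFlag } from "./feature-flags";
import { DEFAULT_DEBUG_ARTIFACT_NAME, DEFAULT_DEBUG_DATABASE_NAME } from "./util";

const uploadFlags = createFeatureFlags([
  FeatureFlag.DatabaseUploadsEnabled,
  FeatureFlag.UploadsDomainEnabled,
]);

const debugDefaults = {
  debugMode: false,
  debugArtifactName: DEFAULT_DEBUG_ARTIFACT_NAME,
  debugDatabaseName: DEFAULT_DEBUG_DATABASE_NAME,
};

// Any flag not listed above reads as disabled, e.g.:
// await uploadFlags.getValue(FeatureFlag.MlPoweredQueriesEnabled) === false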
function getRecordingLogger(messages) {
|
||||
return {
|
||||
debug: (message) => {
|
||||
messages.push({ type: "debug", message });
|
||||
console.debug(message);
|
||||
},
|
||||
info: (message) => {
|
||||
messages.push({ type: "info", message });
|
||||
console.info(message);
|
||||
},
|
||||
warning: (message) => {
|
||||
messages.push({ type: "warning", message });
|
||||
console.warn(message);
|
||||
},
|
||||
error: (message) => {
|
||||
messages.push({ type: "error", message });
|
||||
console.error(message);
|
||||
},
|
||||
isDebug: () => true,
|
||||
startGroup: () => undefined,
|
||||
endGroup: () => undefined,
|
||||
};
|
||||
}
|
||||
function mockHttpRequests(optInStatusCode, useUploadDomain, databaseUploadStatusCode) {
|
||||
async function mockHttpRequests(featureFlags, databaseUploadStatusCode) {
|
||||
// Passing an auth token is required, so we just use a dummy value
|
||||
const client = github.getOctokit("123");
|
||||
const requestSpy = sinon.stub(client, "request");
|
||||
const optInSpy = requestSpy.withArgs("GET /repos/:owner/:repo/code-scanning/codeql/databases");
|
||||
if (optInStatusCode < 300) {
|
||||
optInSpy.resolves({
|
||||
status: optInStatusCode,
|
||||
data: {
|
||||
useUploadDomain,
|
||||
},
|
||||
headers: {},
|
||||
url: "GET /repos/:owner/:repo/code-scanning/codeql/databases",
|
||||
});
|
||||
}
|
||||
else {
|
||||
optInSpy.throws(new util_1.HTTPError("some error message", optInStatusCode));
|
||||
}
|
||||
if (databaseUploadStatusCode !== undefined) {
|
||||
const url = useUploadDomain
|
||||
const url = (await featureFlags.getValue(feature_flags_1.FeatureFlag.UploadsDomainEnabled))
|
||||
? "POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name"
|
||||
: "PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language";
|
||||
const databaseUploadSpy = requestSpy.withArgs(url);
|
||||
@@ -110,7 +79,6 @@ function mockHttpRequests(optInStatusCode, useUploadDomain, databaseUploadStatus
|
||||
else {
|
||||
databaseUploadSpy.throws(new util_1.HTTPError("some error message", databaseUploadStatusCode));
|
||||
}
|
||||
}
|
||||
sinon.stub(apiClient, "getApiClient").value(() => client);
|
||||
}
|
||||
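The reworked test helper above, mockHttpRequests, is now async and takes the feature-flags object (to decide which upload URL to stub) plus the status code the stubbed upload should return, mirroring calls later in this file such as mockHttpRequests(featureFlags, 500). A standalone usage sketch; the declare line only restates the helper's shape so the snippet compiles on its own, and the parameter types are approximate:

import { createFeatureFlags, FeatureFlag } from "./feature-flags";

// mockHttpRequests is defined earlier in this test file; declared here only so the sketch
// stands alone. The optional second argument skips stubbing the upload request entirely.
declare function mockHttpRequests(
  featureFlags: { getValue(flag: FeatureFlag): Promise<boolean> },
  databaseUploadStatusCode?: number
): Promise<void>;

async function exampleSetup() {
  const featureFlags = createFeatureFlags([FeatureFlag.DatabaseUploadsEnabled]);
  await mockHttpRequests(featureFlags, 201); // stub a successful upload response
  return featureFlags;
}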
(0, ava_1.default)("Abort database upload if 'upload-database' input set to false", async (t) => {
|
||||
@@ -122,7 +90,7 @@ function mockHttpRequests(optInStatusCode, useUploadDomain, databaseUploadStatus
|
||||
.returns("false");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), uploadToUploadsDomainFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message === "Database upload disabled in workflow. Skipping upload.") !== undefined);
|
||||
});
|
||||
@@ -138,7 +106,7 @@ function mockHttpRequests(optInStatusCode, useUploadDomain, databaseUploadStatus
|
||||
const config = getTestConfig(tmpDir);
|
||||
config.gitHubVersion = { type: util_1.GitHubVariant.GHES, version: "3.0" };
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, config, (0, feature_flags_1.createFeatureFlags)([]), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message === "Not running against github.com. Skipping upload.") !== undefined);
|
||||
});
|
||||
@@ -154,7 +122,7 @@ function mockHttpRequests(optInStatusCode, useUploadDomain, databaseUploadStatus
|
||||
const config = getTestConfig(tmpDir);
|
||||
config.gitHubVersion = { type: util_1.GitHubVariant.GHAE };
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, config, (0, feature_flags_1.createFeatureFlags)([]), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message === "Not running against github.com. Skipping upload.") !== undefined);
|
||||
});
|
||||
@@ -168,12 +136,12 @@ function mockHttpRequests(optInStatusCode, useUploadDomain, databaseUploadStatus
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), uploadToUploadsDomainFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message === "Not analyzing default branch. Skipping upload.") !== undefined);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Abort database upload if opt-in request returns 404", async (t) => {
|
||||
(0, ava_1.default)("Abort database upload if feature flag is disabled", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
sinon
|
||||
@@ -181,40 +149,18 @@ function mockHttpRequests(optInStatusCode, useUploadDomain, databaseUploadStatus
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
mockHttpRequests(404);
|
||||
(0, codeql_1.setCodeQL)({
|
||||
async databaseBundle() {
|
||||
return;
|
||||
},
|
||||
});
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.UploadsDomainEnabled]), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message ===
|
||||
"Repository is not opted in to database uploads. Skipping upload.") !== undefined);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Abort database upload if opt-in request fails with something other than 404", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
sinon
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
mockHttpRequests(500);
|
||||
(0, codeql_1.setCodeQL)({
|
||||
async databaseBundle() {
|
||||
return;
|
||||
},
|
||||
});
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "info" &&
|
||||
v.message ===
|
||||
"Skipping database upload due to unknown error: Error: some error message") !== undefined);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Don't crash if uploading a database fails", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
@@ -223,14 +169,17 @@ function mockHttpRequests(optInStatusCode, useUploadDomain, databaseUploadStatus
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
mockHttpRequests(200, false, 500);
|
||||
const featureFlags = (0, feature_flags_1.createFeatureFlags)([
|
||||
feature_flags_1.FeatureFlag.DatabaseUploadsEnabled,
|
||||
]);
|
||||
await mockHttpRequests(featureFlags, 500);
|
||||
(0, codeql_1.setCodeQL)({
|
||||
async databaseBundle(_, outputFilePath) {
|
||||
fs.writeFileSync(outputFilePath, "");
|
||||
},
|
||||
});
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), featureFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "warning" &&
|
||||
v.message ===
|
||||
"Failed to upload database for javascript: Error: some error message") !== undefined);
|
||||
@@ -244,14 +193,14 @@ function mockHttpRequests(optInStatusCode, useUploadDomain, databaseUploadStatus
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
mockHttpRequests(200, false, 201);
|
||||
await mockHttpRequests(uploadToUploadsDomainFlags, 201);
|
||||
(0, codeql_1.setCodeQL)({
|
||||
async databaseBundle(_, outputFilePath) {
|
||||
fs.writeFileSync(outputFilePath, "");
|
||||
},
|
||||
});
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), uploadToUploadsDomainFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message === "Successfully uploaded database for javascript") !== undefined);
|
||||
});
|
||||
@@ -264,14 +213,14 @@ function mockHttpRequests(optInStatusCode, useUploadDomain, databaseUploadStatus
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
mockHttpRequests(200, true, 201);
|
||||
await mockHttpRequests(uploadToUploadsDomainFlags, 201);
|
||||
(0, codeql_1.setCodeQL)({
|
||||
async databaseBundle(_, outputFilePath) {
|
||||
fs.writeFileSync(outputFilePath, "");
|
||||
},
|
||||
});
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), uploadToUploadsDomainFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message === "Successfully uploaded database for javascript") !== undefined);
|
||||
});
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -1,3 +1,3 @@
{
"bundleVersion": "codeql-bundle-20211208"
"bundleVersion": "codeql-bundle-20220112"
}
lib/feature-flags.js (generated, new file, 91 lines)
@@ -0,0 +1,91 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createFeatureFlags = exports.GitHubFeatureFlags = exports.FeatureFlag = void 0;
|
||||
const api_client_1 = require("./api-client");
|
||||
const util = __importStar(require("./util"));
|
||||
var FeatureFlag;
|
||||
(function (FeatureFlag) {
|
||||
FeatureFlag["DatabaseUploadsEnabled"] = "database_uploads_enabled";
|
||||
FeatureFlag["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
|
||||
FeatureFlag["UploadsDomainEnabled"] = "uploads_domain_enabled";
|
||||
})(FeatureFlag = exports.FeatureFlag || (exports.FeatureFlag = {}));
|
||||
class GitHubFeatureFlags {
|
||||
constructor(gitHubVersion, apiDetails, repositoryNwo, logger) {
|
||||
this.gitHubVersion = gitHubVersion;
|
||||
this.apiDetails = apiDetails;
|
||||
this.repositoryNwo = repositoryNwo;
|
||||
this.logger = logger;
|
||||
}
|
||||
async getValue(flag) {
|
||||
const response = (await this.getApiResponse())[flag];
|
||||
if (response === undefined) {
|
||||
this.logger.debug(`Feature flag '${flag}' undefined in API response, considering it disabled.`);
|
||||
return false;
|
||||
}
|
||||
return response;
|
||||
}
|
||||
async preloadFeatureFlags() {
|
||||
await this.getApiResponse();
|
||||
}
|
||||
async getApiResponse() {
|
||||
const loadApiResponse = async () => {
|
||||
// Do nothing when not running against github.com
|
||||
if (this.gitHubVersion.type !== util.GitHubVariant.DOTCOM) {
|
||||
this.logger.debug("Not running against github.com. Disabling all feature flags.");
|
||||
return {};
|
||||
}
|
||||
const client = (0, api_client_1.getApiClient)(this.apiDetails);
|
||||
try {
|
||||
const response = await client.request("GET /repos/:owner/:repo/code-scanning/codeql-action/features", {
|
||||
owner: this.repositoryNwo.owner,
|
||||
repo: this.repositoryNwo.repo,
|
||||
});
|
||||
return response.data;
|
||||
}
|
||||
catch (e) {
|
||||
// Some feature flags, such as `ml_powered_queries_enabled` affect the produced alerts.
|
||||
// Considering these feature flags disabled in the event of a transient error could
|
||||
// therefore lead to alert churn. As a result, we crash if we cannot determine the value of
|
||||
// the feature flags.
|
||||
throw new Error(`Encountered an error while trying to load feature flags: ${e}`);
|
||||
}
|
||||
};
|
||||
const apiResponse = this.cachedApiResponse || (await loadApiResponse());
|
||||
this.cachedApiResponse = apiResponse;
|
||||
return apiResponse;
|
||||
}
|
||||
}
|
||||
exports.GitHubFeatureFlags = GitHubFeatureFlags;
|
||||
/**
|
||||
* Create a feature flags instance with the specified set of enabled flags.
|
||||
*
|
||||
* This should be only used within tests.
|
||||
*/
|
||||
function createFeatureFlags(enabledFlags) {
|
||||
return {
|
||||
getValue: async (flag) => {
|
||||
return enabledFlags.includes(flag);
|
||||
},
|
||||
};
|
||||
}
|
||||
exports.createFeatureFlags = createFeatureFlags;
|
||||
//# sourceMappingURL=feature-flags.js.map
|
||||
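The new module above exports FeatureFlag, GitHubFeatureFlags and createFeatureFlags. An illustrative usage sketch based on those exports; the any-typed declarations are stand-ins for the real types from ./util, ./api-client and ./repository, which are not shown in this diff:

import { FeatureFlag, GitHubFeatureFlags, createFeatureFlags } from "./feature-flags";

declare const gitHubVersion: any;  // util.GitHubVersion in the real code
declare const apiDetails: any;     // API auth and URL details
declare const repositoryNwo: any;  // owner/repo pair
declare const logger: any;         // Logger

async function example() {
  // Production path: flags are fetched once from
  // GET /repos/:owner/:repo/code-scanning/codeql-action/features and cached in memory.
  const flags = new GitHubFeatureFlags(gitHubVersion, apiDetails, repositoryNwo, logger);
  await flags.preloadFeatureFlags(); // optional warm-up; getValue() reuses the cached response
  if (await flags.getValue(FeatureFlag.MlPoweredQueriesEnabled)) {
    // ... add the ML-powered query pack ...
  }

  // Test path: a fixed in-memory implementation.
  const testFlags = createFeatureFlags([FeatureFlag.UploadsDomainEnabled]);
  await testFlags.getValue(FeatureFlag.UploadsDomainEnabled); // true
}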
lib/feature-flags.js.map (new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"feature-flags.js","sourceRoot":"","sources":["../src/feature-flags.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,6CAA8D;AAG9D,6CAA+B;AAM/B,IAAY,WAIX;AAJD,WAAY,WAAW;IACrB,kEAAmD,CAAA;IACnD,qEAAsD,CAAA;IACtD,8DAA+C,CAAA;AACjD,CAAC,EAJW,WAAW,GAAX,mBAAW,KAAX,mBAAW,QAItB;AAUD,MAAa,kBAAkB;IAG7B,YACU,aAAiC,EACjC,UAA4B,EAC5B,aAA4B,EAC5B,MAAc;QAHd,kBAAa,GAAb,aAAa,CAAoB;QACjC,eAAU,GAAV,UAAU,CAAkB;QAC5B,kBAAa,GAAb,aAAa,CAAe;QAC5B,WAAM,GAAN,MAAM,CAAQ;IACrB,CAAC;IAEJ,KAAK,CAAC,QAAQ,CAAC,IAAiB;QAC9B,MAAM,QAAQ,GAAG,CAAC,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC;QACrD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,iBAAiB,IAAI,uDAAuD,CAC7E,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,KAAK,CAAC,mBAAmB;QACvB,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;IAC9B,CAAC;IAEO,KAAK,CAAC,cAAc;QAC1B,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE;YACjC,iDAAiD;YACjD,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;gBACzD,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,8DAA8D,CAC/D,CAAC;gBACF,OAAO,EAAE,CAAC;aACX;YACD,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAC7C,IAAI;gBACF,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,OAAO,CACnC,8DAA8D,EAC9D;oBACE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;oBAC/B,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI;iBAC9B,CACF,CAAC;gBACF,OAAO,QAAQ,CAAC,IAAI,CAAC;aACtB;YAAC,OAAO,CAAC,EAAE;gBACV,uFAAuF;gBACvF,mFAAmF;gBACnF,2FAA2F;gBAC3F,qBAAqB;gBACrB,MAAM,IAAI,KAAK,CACb,4DAA4D,CAAC,EAAE,CAChE,CAAC;aACH;QACH,CAAC,CAAC;QAEF,MAAM,WAAW,GAAG,IAAI,CAAC,iBAAiB,IAAI,CAAC,MAAM,eAAe,EAAE,CAAC,CAAC;QACxE,IAAI,CAAC,iBAAiB,GAAG,WAAW,CAAC;QACrC,OAAO,WAAW,CAAC;IACrB,CAAC;CACF;AA3DD,gDA2DC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,YAA2B;IAC5D,OAAO;QACL,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE;YACvB,OAAO,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACrC,CAAC;KACF,CAAC;AACJ,CAAC;AAND,gDAMC"}
lib/feature-flags.test.js (generated, new file, 98 lines)
@@ -0,0 +1,98 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const logging_1 = require("./logging");
|
||||
const repository_1 = require("./repository");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util_1 = require("./util");
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
ava_1.default.beforeEach(() => {
|
||||
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
|
||||
});
|
||||
const testApiDetails = {
|
||||
auth: "1234",
|
||||
url: "https://github.com",
|
||||
};
|
||||
const testRepositoryNwo = (0, repository_1.parseRepositoryNwo)("github/example");
|
||||
const ALL_FEATURE_FLAGS_DISABLED_VARIANTS = [
|
||||
{
|
||||
description: "GHES",
|
||||
gitHubVersion: { type: util_1.GitHubVariant.GHES, version: "3.0.0" },
|
||||
},
|
||||
{ description: "GHAE", gitHubVersion: { type: util_1.GitHubVariant.GHAE } },
|
||||
];
|
||||
for (const variant of ALL_FEATURE_FLAGS_DISABLED_VARIANTS) {
|
||||
(0, ava_1.default)(`All feature flags are disabled if running against ${variant.description}`, async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const loggedMessages = [];
|
||||
const featureFlags = new feature_flags_1.GitHubFeatureFlags(variant.gitHubVersion, testApiDetails, testRepositoryNwo, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
for (const flag of Object.values(feature_flags_1.FeatureFlag)) {
|
||||
t.assert((await featureFlags.getValue(flag)) === false);
|
||||
}
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message ===
|
||||
"Not running against github.com. Disabling all feature flags.") !== undefined);
|
||||
});
|
||||
});
|
||||
}
|
||||
(0, ava_1.default)("Feature flags are disabled if they're not returned in API response", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const loggedMessages = [];
|
||||
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
|
||||
for (const flag of Object.values(feature_flags_1.FeatureFlag)) {
|
||||
t.assert((await featureFlags.getValue(flag)) === false);
|
||||
}
|
||||
for (const featureFlag of [
|
||||
"database_uploads_enabled",
|
||||
"ml_powered_queries_enabled",
|
||||
"uploads_domain_enabled",
|
||||
]) {
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message ===
|
||||
`Feature flag '${featureFlag}' undefined in API response, considering it disabled.`) !== undefined);
|
||||
}
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Feature flags exception is propagated if the API request errors", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, logging_1.getRunnerLogger)(true));
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(500, {});
|
||||
await t.throwsAsync(async () => featureFlags.preloadFeatureFlags(), {
|
||||
message: "Encountered an error while trying to load feature flags: Error: some error message",
|
||||
});
|
||||
});
|
||||
});
|
||||
const FEATURE_FLAGS = [
|
||||
"database_uploads_enabled",
|
||||
"ml_powered_queries_enabled",
|
||||
"uploads_domain_enabled",
|
||||
];
|
||||
for (const featureFlag of FEATURE_FLAGS) {
|
||||
(0, ava_1.default)(`Feature flag '${featureFlag}' is enabled if enabled in the API response`, async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, logging_1.getRunnerLogger)(true));
|
||||
const expectedFeatureFlags = {};
|
||||
for (const f of FEATURE_FLAGS) {
|
||||
expectedFeatureFlags[f] = false;
|
||||
}
|
||||
expectedFeatureFlags[featureFlag] = true;
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureFlags);
|
||||
const actualFeatureFlags = {
|
||||
database_uploads_enabled: await featureFlags.getValue(feature_flags_1.FeatureFlag.DatabaseUploadsEnabled),
|
||||
ml_powered_queries_enabled: await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled),
|
||||
uploads_domain_enabled: await featureFlags.getValue(feature_flags_1.FeatureFlag.UploadsDomainEnabled),
|
||||
};
|
||||
t.deepEqual(actualFeatureFlags, expectedFeatureFlags);
|
||||
});
|
||||
});
|
||||
}
|
||||
//# sourceMappingURL=feature-flags.test.js.map
|
||||
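A condensed restatement of the behaviours the tests above pin down, written as an illustration rather than code from the repository; the two declared instances stand in for objects constructed with different GitHub variants:

import { FeatureFlag, GitHubFeatureFlags } from "./feature-flags";

declare const ghesFlags: GitHubFeatureFlags;   // constructed with a GHES or GHAE GitHubVariant
declare const dotcomFlags: GitHubFeatureFlags; // constructed with GitHubVariant.DOTCOM

async function documentedBehaviour() {
  // 1. Off github.com, every flag is reported as disabled.
  await ghesFlags.getValue(FeatureFlag.DatabaseUploadsEnabled); // false
  // 2. On github.com, a flag missing from the API payload is treated as disabled
  //    (with a debug message naming the flag).
  // 3. If the features API request itself fails, preloadFeatureFlags() and getValue() throw
  //    rather than silently disabling flags, to avoid alert churn from ML-powered queries.
  await dotcomFlags.getValue(FeatureFlag.MlPoweredQueriesEnabled);
}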
lib/feature-flags.test.js.map (new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"feature-flags.test.js","sourceRoot":"","sources":["../src/feature-flags.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAGvB,mDAAkE;AAClE,uCAA4C;AAC5C,6CAAkD;AAClD,mDAMyB;AAEzB,iCAAgF;AAEhF,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAC/C,CAAC,CAAC,CAAC;AAEH,MAAM,cAAc,GAAqB;IACvC,IAAI,EAAE,MAAM;IACZ,GAAG,EAAE,oBAAoB;CAC1B,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAA,+BAAkB,EAAC,gBAAgB,CAAC,CAAC;AAE/D,MAAM,mCAAmC,GAGpC;IACH;QACE,WAAW,EAAE,MAAM;QACnB,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE;KAC9D;IACD,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,EAAE;CACrE,CAAC;AAEF,KAAK,MAAM,OAAO,IAAI,mCAAmC,EAAE;IACzD,IAAA,aAAI,EAAC,qDAAqD,OAAO,CAAC,WAAW,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC3F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;YAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,OAAO,CAAC,aAAa,EACrB,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;YAEF,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;gBAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;aACzD;YAED,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,8DAA8D,CACnE,KAAK,SAAS,CAChB,CAAC;QACJ,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ;AAED,IAAA,aAAI,EAAC,oEAAoE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;QAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;YAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;SACzD;QAED,KAAK,MAAM,WAAW,IAAI;YACxB,0BAA0B;YAC1B,4BAA4B;YAC5B,wBAAwB;SACzB,EAAE;YACD,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,iBAAiB,WAAW,uDAAuD,CACxF,KAAK,SAAS,CAChB,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iEAAiE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,MAAM,CAAC,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE,CAAC,YAAY,CAAC,mBAAmB,EAAE,EAAE;YAClE,OAAO,EACL,oFAAoF;SACvF,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,MAAM,aAAa,GAAG;IACpB,0BAA0B;IAC1B,4BAA4B;IAC5B,wBAAwB;CACzB,CAAC;AAEF,KAAK,MAAM,WAAW,IAAI,aAAa,EAAE;IACvC,IAAA,aAAI,EAAC,iBAAiB,WAAW,6CAA6C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC1F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;YAEF,MAAM,oBAAoB,GAAG,EAAE,CAAC;YAChC,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;gBAC7B,oBAAoB,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;aACjC;YACD,oBAAoB,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC;YACzC,IAAA,0CAA0B,EAAC,GAAG,EAAE,oBAAoB,CAAC,CAAC;YAEtD,MAAM,kBAAkB,GAAG;gBACzB,wBAAwB,EAAE,MAAM,YAAY,CAAC,QAAQ,CACnD,2BAAW,CAAC,sBAAsB,CACnC;gBACD,0BAA0B,EAAE,MAAM,YAAY,CAAC,QAAQ,CACrD,2BAAW,CAAC,uBAAuB,CACp
C;gBACD,sBAAsB,EAAE,MAAM,YAAY,CAAC,QAAQ,CACjD,2BAAW,CAAC,oBAAoB,CACjC;aACF,CAAC;YAEF,CAAC,CAAC,SAAS,CAAC,kBAAkB,EAAE,oBAAoB,CAAC,CAAC;QACxD,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ"}
lib/fingerprints.js (generated, 5 changed lines)
@@ -226,9 +226,8 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
exports.resolveUriToFile = resolveUriToFile;
// Compute fingerprints for results in the given sarif file
// and return an updated sarif file contents.
async function addFingerprints(sarifContents, sourceRoot, logger) {
async function addFingerprints(sarif, sourceRoot, logger) {
var _a, _b, _c;
const sarif = JSON.parse(sarifContents);
// Gather together results for the same file and construct
// callbacks to accept hashes for that file and update the location
const callbacksByFile = {};
@@ -266,7 +265,7 @@ async function addFingerprints(sarifContents, sourceRoot, logger) {
};
await hash(teeCallback, filepath);
}
return JSON.stringify(sarif);
return sarif;
}
exports.addFingerprints = addFingerprints;
//# sourceMappingURL=fingerprints.js.map
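The change above moves the JSON handling out of addFingerprints: it used to take and return the SARIF document as a string, and now takes and returns the parsed object. A caller-side sketch of that shift; the logger and sourceRoot declarations are stand-ins so the snippet is self-contained:

import * as fs from "fs";
import { addFingerprints } from "./fingerprints";

declare const logger: any;        // Logger stand-in
declare const sourceRoot: string; // checkout root used to resolve SARIF URIs

async function fingerprintFile(path: string) {
  const sarif = JSON.parse(fs.readFileSync(path, "utf8")); // the caller parses now
  const withFingerprints = await addFingerprints(sarif, sourceRoot, logger);
  return withFingerprints; // already an object; stringify only when writing back to disk
}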
File diff suppressed because one or more lines are too long
lib/fingerprints.test.js (generated, 22 changed lines)
@@ -169,30 +169,24 @@ function testResolveUriToFile(uri, index, artifactsURIs) {
});
(0, ava_1.default)("addFingerprints", async (t) => {
// Run an end-to-end test on a test file
let input = fs
const input = JSON.parse(fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`)
.toString();
let expected = fs
.toString());
const expected = JSON.parse(fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.expected.sarif`)
.toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
.toString());
// The URIs in the SARIF files resolve to files in the testdata directory
const sourceRoot = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, (0, logging_1.getRunnerLogger)(true)), expected);
});
(0, ava_1.default)("missingRegions", async (t) => {
// Run an end-to-end test on a test file
let input = fs
const input = JSON.parse(fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`)
.toString();
let expected = fs
.toString());
const expected = JSON.parse(fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.expected.sarif`)
.toString();
// The test files are stored prettified, but addFingerprints outputs condensed JSON
input = JSON.stringify(JSON.parse(input));
expected = JSON.stringify(JSON.parse(expected));
.toString());
// The URIs in the SARIF files resolve to files in the testdata directory
const sourceRoot = path.normalize(`${__dirname}/../src/testdata`);
t.deepEqual(await fingerprints.addFingerprints(input, sourceRoot, (0, logging_1.getRunnerLogger)(true)), expected);
File diff suppressed because one or more lines are too long
10
lib/init-action.js
generated
10
lib/init-action.js
generated
@@ -23,6 +23,7 @@ const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const actions_util_1 = require("./actions-util");
const codeql_1 = require("./codeql");
const feature_flags_1 = require("./feature-flags");
const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
@@ -78,6 +79,9 @@ async function run() {
};
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger, util_1.Mode.actions);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
const featureFlags = new feature_flags_1.GitHubFeatureFlags(gitHubVersion, apiDetails, repositoryNwo, logger);
void featureFlags.preloadFeatureFlags();
try {
const workflowErrors = await (0, actions_util_1.validateWorkflow)();
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
@@ -87,7 +91,11 @@ async function run() {
codeql = initCodeQLResult.codeql;
toolsVersion = initCodeQLResult.toolsVersion;
await (0, util_1.enrichEnvironment)(util_1.Mode.actions, codeql);
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), (0, actions_util_1.getOptionalInput)("debug") === "true", (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), (0, actions_util_1.getTemporaryDirectory)(), (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE"), codeql, (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, logger);
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), (0, actions_util_1.getOptionalInput)("debug") === "true", (0, actions_util_1.getOptionalInput)("debug-artifact-name") || util_1.DEFAULT_DEBUG_ARTIFACT_NAME, (0, actions_util_1.getOptionalInput)("debug-database-name") || util_1.DEFAULT_DEBUG_DATABASE_NAME, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE"), codeql, (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, featureFlags, logger);
if (config.languages.some(languages_1.isTracedLanguage)) {
// We currently do not support tracing on Windows 11 and Windows Server 2022
(0, util_1.checkNotWindows11)();
}
if (config.languages.includes(languages_1.Language.python) &&
(0, actions_util_1.getRequiredInput)("setup-python-dependencies") === "true") {
try {
File diff suppressed because one or more lines are too long
4
lib/init.js
generated
4
lib/init.js
generated
@@ -38,9 +38,9 @@ async function initCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant,
return { codeql, toolsVersion };
}
exports.initCodeQL = initCodeQL;
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
logger.startGroup("Load language configuration");
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
analysisPaths.printPathFiltersWarning(config, logger);
logger.endGroup();
return config;
@@ -1 +1 @@
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,YAAoB,EACpB,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,OAAO,EACP,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,SAAkB,EAClB,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AApCD,gCAoCC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,YAAgC;IAEhC,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;QAChE,0BAA0B;QAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,SAAS,EAChB,UAAU,EACV,WAAW,EACX,YAAY,CACb,CAAC;KACH;SAAM;QACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;YACvC,yBAAyB;YACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;SACH;KACF;IAED,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AA9BD,0BA8BC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,
CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAtCD,8CAsCC"}
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAI9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,YAAoB,EACpB,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,OAAO,EACP,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,YAA0B,EAC1B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,YAAY,EACZ,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA1CD,gCA0CC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,YAAgC;IAEhC,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;QAChE,0BAA0B;QAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,SAAS,EAChB,UAAU,EACV,WAAW,EACX,YAAY,CACb,CAAC;KACH;SAAM;QACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;YACvC,yBAAyB;YACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;SACH;KACF;IAED,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AA9BD,0BA8BC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OA
AO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAtCD,8CAsCC"}
3
lib/runner.js
generated
3
lib/runner.js
generated
@@ -31,6 +31,7 @@ const analyze_1 = require("./analyze");
const autobuild_1 = require("./autobuild");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const feature_flags_1 = require("./feature-flags");
const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
@@ -163,7 +164,7 @@ program
}
await (0, util_1.enrichEnvironment)(util_1.Mode.runner, codeql);
const workspacePath = checkoutPath;
const config = await (0, init_1.initConfig)(cmd.languages, cmd.queries, cmd.packs, cmd.configFile, undefined, false, (0, repository_1.parseRepositoryNwo)(cmd.repository), tempDir, toolsDir, codeql, workspacePath, gitHubVersion, apiDetails, logger);
const config = await (0, init_1.initConfig)(cmd.languages, cmd.queries, cmd.packs, cmd.configFile, undefined, false, "", "", (0, repository_1.parseRepositoryNwo)(cmd.repository), tempDir, toolsDir, codeql, workspacePath, gitHubVersion, apiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
const sourceRoot = checkoutPath;
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, parseTraceProcessName(), parseTraceProcessLevel());
if (tracerConfig === undefined) {
File diff suppressed because one or more lines are too long
49
lib/testing-utils.js
generated
49
lib/testing-utils.js
generated
@@ -19,9 +19,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.setupActionsVars = exports.setupTests = void 0;
exports.mockFeatureFlagApiEndpoint = exports.getRecordingLogger = exports.setupActionsVars = exports.setupTests = void 0;
const github = __importStar(require("@actions/github"));
const sinon = __importStar(require("sinon"));
const apiClient = __importStar(require("./api-client"));
const CodeQL = __importStar(require("./codeql"));
const util_1 = require("./util");
function wrapOutput(context) {
// Function signature taken from Socket.write.
// Note there are two overloads:
@@ -89,4 +92,48 @@ function setupActionsVars(tempDir, toolsDir) {
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
}
exports.setupActionsVars = setupActionsVars;
function getRecordingLogger(messages) {
return {
debug: (message) => {
messages.push({ type: "debug", message });
console.debug(message);
},
info: (message) => {
messages.push({ type: "info", message });
console.info(message);
},
warning: (message) => {
messages.push({ type: "warning", message });
console.warn(message);
},
error: (message) => {
messages.push({ type: "error", message });
console.error(message);
},
isDebug: () => true,
startGroup: () => undefined,
endGroup: () => undefined,
};
}
exports.getRecordingLogger = getRecordingLogger;
/** Mock the HTTP request to the feature flags enablement API endpoint. */
function mockFeatureFlagApiEndpoint(responseStatusCode, response) {
// Passing an auth token is required, so we just use a dummy value
const client = github.getOctokit("123");
const requestSpy = sinon.stub(client, "request");
const optInSpy = requestSpy.withArgs("GET /repos/:owner/:repo/code-scanning/codeql-action/features");
if (responseStatusCode < 300) {
optInSpy.resolves({
status: responseStatusCode,
data: response,
headers: {},
url: "GET /repos/:owner/:repo/code-scanning/codeql-action/features",
});
}
else {
optInSpy.throws(new util_1.HTTPError("some error message", responseStatusCode));
}
sinon.stub(apiClient, "getApiClient").value(() => client);
}
exports.mockFeatureFlagApiEndpoint = mockFeatureFlagApiEndpoint;
//# sourceMappingURL=testing-utils.js.map
@@ -1 +1 @@
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,6CAA+B;AAE/B,iDAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;AAC9C,CAAC;AAHD,4CAGC"}
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,wDAA0C;AAE1C,6CAA+B;AAE/B,wDAA0C;AAC1C,iDAAmC;AAEnC,iCAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;AAC9C,CAAC;AAHD,4CAGC;AAOD,SAAgB,kBAAkB,CAAC,QAAyB;IAC1D,OAAO;QACL,KAAK,EAAE,CAAC,OAAe,EAAE,EAAE;YACzB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,IAAI,EAAE,CAAC,OAAe,EAAE,EAAE;YACxB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;YACzC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,OAAO,EAAE,CAAC,OAAuB,EAAE,EAAE;YACnC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,CAAC,CAAC;YAC5C,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,KAAK,EAAE,CAAC,OAAuB,EAAE,EAAE;YACjC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI;QACnB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAtBD,gDAsBC;AAED,0EAA0E;AAC1E,SAAgB,0BAA0B,CACxC,kBAA0B,EAC1B,QAAyC;IAEzC,kEAAkE;IAClE,MAAM,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IAExC,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IAEjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAClC,8DAA8D,CAC/D,CAAC;IACF,IAAI,kBAAkB,GAAG,GAAG,EAAE;QAC5B,QAAQ,CAAC,QAAQ,CAAC;YAChB,MAAM,EAAE,kBAAkB;YAC1B,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,8DAA8D;SACpE,CAAC,CAAC;KACJ;SAAM;QACL,QAAQ,CAAC,MAAM,CA
AC,IAAI,gBAAS,CAAC,oBAAoB,EAAE,kBAAkB,CAAC,CAAC,CAAC;KAC1E;IAED,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;AAC5D,CAAC;AAxBD,gEAwBC"}
8
lib/toolrunner-error-catcher.js
generated
8
lib/toolrunner-error-catcher.js
generated
@@ -44,10 +44,6 @@ async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
if (((_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout) !== undefined) {
options.listeners.stdout(data);
}
else {
// if no stdout listener was originally defined then we match default behavior of Toolrunner
process.stdout.write(data);
}
},
stderr: (data) => {
var _a;
@@ -55,10 +51,6 @@ async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
if (((_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stderr) !== undefined) {
options.listeners.stderr(data);
}
else {
// if no stderr listener was originally defined then we match default behavior of Toolrunner
process.stderr.write(data);
}
},
};
// we capture the original return code or error so that if no match is found we can duplicate the behavior
@@ -1 +1 @@
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AAIpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CAC3C,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI,EAAE,wDAAwD;SACjF,CACF,CAAC,IAAI,EAAE,CAAC;KACV;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;KAC7D;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW;iBAChC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA;iBACjC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,WAAW,EAAE,CACpE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AAzED,wDAyEC"}
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AAIpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CAC3C,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI,EAAE,wDAAwD;SACjF,CACF,CAAC,IAAI,EAAE,CAAC;KACV;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;KAC7D;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW;iBAChC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA;iBACjC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,WAAW,EAAE,CACpE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AAnED,wDAmEC"}
2
lib/tracer-config.test.js
generated
2
lib/tracer-config.test.js
generated
@@ -45,6 +45,8 @@ function getTestConfig(tmpDir) {
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
}
// A very minimal setup
File diff suppressed because one or more lines are too long
54
lib/upload-lib.js
generated
54
lib/upload-lib.js
generated
@@ -54,17 +54,14 @@ function combineSarifFiles(sarifFiles) {
}
combinedSarif.runs.push(...sarifObject.runs);
}
return JSON.stringify(combinedSarif);
return combinedSarif;
}
exports.combineSarifFiles = combineSarifFiles;
// Populates the run.automationDetails.id field using the analysis_key and environment
// and return an updated sarif file contents.
function populateRunAutomationDetails(sarifContents, category, analysis_key, environment) {
if (analysis_key === undefined) {
return sarifContents;
}
function populateRunAutomationDetails(sarif, category, analysis_key, environment) {
const automationID = getAutomationID(category, analysis_key, environment);
const sarif = JSON.parse(sarifContents);
if (automationID !== undefined) {
for (const run of sarif.runs || []) {
if (run.automationDetails === undefined) {
run.automationDetails = {
@@ -72,7 +69,9 @@ function populateRunAutomationDetails(sarifContents, category, analysis_key, env
};
}
}
return JSON.stringify(sarif);
return sarif;
}
return sarif;
}
exports.populateRunAutomationDetails = populateRunAutomationDetails;
function getAutomationID(category, analysis_key, environment) {
@@ -83,7 +82,11 @@ function getAutomationID(category, analysis_key, environment) {
}
return automationID;
}
// analysis_key is undefined for the runner.
if (analysis_key !== undefined) {
return actionsUtil.computeAutomationID(analysis_key, environment);
}
return undefined;
}
// Upload the given payload.
// If the request fails then this will retry a small number of times.
@@ -244,17 +247,18 @@ exports.buildPayload = buildPayload;
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, sourceRoot, environment, gitHubVersion, apiDetails, logger) {
logger.startGroup("Uploading results");
logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
validateUniqueCategory(category);
// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
validateSarifFileSchema(file, logger);
}
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = await fingerprints.addFingerprints(sarifPayload, sourceRoot, logger);
sarifPayload = populateRunAutomationDetails(sarifPayload, category, analysisKey, environment);
let sarif = combineSarifFiles(sarifFiles);
sarif = await fingerprints.addFingerprints(sarif, sourceRoot, logger);
sarif = populateRunAutomationDetails(sarif, category, analysisKey, environment);
const toolNames = util.getToolNames(sarif);
validateUniqueCategory(sarif);
const sarifPayload = JSON.stringify(sarif);
const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = (0, file_url_1.default)(sourceRoot);
const toolNames = util.getToolNames(sarifPayload);
const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion);
// Log some useful debug info about the info
const rawUploadSizeBytes = sarifPayload.length;
@@ -325,17 +329,29 @@ async function waitForProcessing(repositoryNwo, sarifID, apiDetails, logger) {
logger.endGroup();
}
exports.waitForProcessing = waitForProcessing;
function validateUniqueCategory(category) {
if (util.isActions()) {
function validateUniqueCategory(sarif) {
var _a, _b, _c;
// This check only works on actions as env vars don't persist between calls to the runner
const sentinelEnvVar = `CODEQL_UPLOAD_SARIF${category ? `_${sanitize(category)}` : ""}`;
if (util.isActions()) {
// duplicate categories are allowed in the same sarif file
// but not across multiple sarif files
const categories = {};
for (const run of sarif.runs) {
const id = (_a = run === null || run === void 0 ? void 0 : run.automationDetails) === null || _a === void 0 ? void 0 : _a.id;
const tool = (_c = (_b = run.tool) === null || _b === void 0 ? void 0 : _b.driver) === null || _c === void 0 ? void 0 : _c.name;
const category = `${sanitize(id)}_${sanitize(tool)}`;
categories[category] = { id, tool };
}
for (const [category, { id, tool }] of Object.entries(categories)) {
const sentinelEnvVar = `CODEQL_UPLOAD_SARIF_${category}`;
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per category. " +
"Please specify a unique `category` to call this action multiple times. " +
`Category: ${category ? category : "(none)"}`);
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. " +
"The easiest fix is to specify a unique value for the `category` input. " +
`Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})`);
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
}
}
}
exports.validateUniqueCategory = validateUniqueCategory;
/**
@@ -348,6 +364,6 @@ exports.validateUniqueCategory = validateUniqueCategory;
* @param str the initial value to sanitize
*/
function sanitize(str) {
return str.replace(/[^a-zA-Z0-9_]/g, "_");
return (str !== null && str !== void 0 ? str : "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}
//# sourceMappingURL=upload-lib.js.map
File diff suppressed because one or more lines are too long
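For reference, the per-run category check that validateUniqueCategory now performs can be reproduced in isolation. This is a sketch derived from the sanitize and sentinel-variable code shown above (the helper name sentinelFor is illustrative, not part of the codebase); it shows why values such as "abc@def" and "abc_def" end up clashing:

// Mirrors the updated sanitize logic: undefined becomes "_", any other
// non-alphanumeric character becomes "_", and the result is upper-cased.
function sanitize(str) {
  return (str !== null && str !== void 0 ? str : "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}

// Illustrative helper: the sentinel environment variable a run would produce
// from its automationDetails.id and tool driver name.
function sentinelFor(automationDetailsId, toolName) {
  return `CODEQL_UPLOAD_SARIF_${sanitize(automationDetailsId)}_${sanitize(toolName)}`;
}

console.log(sentinelFor("abc@def", "CodeQL")); // CODEQL_UPLOAD_SARIF_ABC_DEF_CODEQL
console.log(sentinelFor("abc_def", "CodeQL")); // same sentinel, so a second upload in the job is rejected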
110
lib/upload-lib.test.js
generated
110
lib/upload-lib.test.js
generated
@@ -98,9 +98,13 @@ ava_1.default.beforeEach(() => {
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("populateRunAutomationDetails", (t) => {
|
||||
let sarif = '{"runs": [{}]}';
|
||||
let sarif = {
|
||||
runs: [{}],
|
||||
};
|
||||
const analysisKey = ".github/workflows/codeql-analysis.yml:analyze";
|
||||
let expectedSarif = '{"runs":[{"automationDetails":{"id":"language:javascript/os:linux/"}}]}';
|
||||
let expectedSarif = {
|
||||
runs: [{ automationDetails: { id: "language:javascript/os:linux/" } }],
|
||||
};
|
||||
// Category has priority over analysis_key/environment
|
||||
let modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, "language:javascript/os:linux", analysisKey, '{"language": "other", "os": "other"}');
|
||||
t.deepEqual(modifiedSarif, expectedSarif);
|
||||
@@ -108,25 +112,99 @@ ava_1.default.beforeEach(() => {
|
||||
modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, "language:javascript/os:linux/", analysisKey, "");
|
||||
t.deepEqual(modifiedSarif, expectedSarif);
|
||||
// check that the automation details doesn't get overwritten
|
||||
sarif = '{"runs":[{"automationDetails":{"id":"my_id"}}]}';
|
||||
expectedSarif = '{"runs":[{"automationDetails":{"id":"my_id"}}]}';
|
||||
sarif = { runs: [{ automationDetails: { id: "my_id" } }] };
|
||||
expectedSarif = { runs: [{ automationDetails: { id: "my_id" } }] };
|
||||
modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, undefined, analysisKey, '{"os": "linux", "language": "javascript"}');
|
||||
t.deepEqual(modifiedSarif, expectedSarif);
|
||||
// check multiple runs
|
||||
sarif = { runs: [{ automationDetails: { id: "my_id" } }, {}] };
|
||||
expectedSarif = {
|
||||
runs: [
|
||||
{ automationDetails: { id: "my_id" } },
|
||||
{
|
||||
automationDetails: {
|
||||
id: ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/",
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, undefined, analysisKey, '{"os": "linux", "language": "javascript"}');
|
||||
t.deepEqual(modifiedSarif, expectedSarif);
|
||||
});
|
||||
(0, ava_1.default)("validateUniqueCategory", (t) => {
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(undefined));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(undefined));
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory("abc"));
|
||||
t.throws(() => uploadLib.validateUniqueCategory("abc"));
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory("def"));
|
||||
t.throws(() => uploadLib.validateUniqueCategory("def"));
|
||||
(0, ava_1.default)("validateUniqueCategory when empty", (t) => {
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif()));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif()));
|
||||
});
|
||||
(0, ava_1.default)("validateUniqueCategory for automation details id", (t) => {
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("AbC")));
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("def")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("def")));
|
||||
// Our category sanitization is not perfect. Here are some examples
|
||||
// of where we see false clashes
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory("abc/def"));
|
||||
t.throws(() => uploadLib.validateUniqueCategory("abc@def"));
|
||||
t.throws(() => uploadLib.validateUniqueCategory("abc_def"));
|
||||
t.throws(() => uploadLib.validateUniqueCategory("abc def"));
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc/def")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc@def")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc_def")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc def")));
|
||||
// this one is fine
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory("abc_ def"));
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc_ def")));
|
||||
});
|
||||
(0, ava_1.default)("validateUniqueCategory for tool name", (t) => {
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "AbC")));
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "def")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "def")));
|
||||
// Our category sanitization is not perfect. Here are some examples
|
||||
// of where we see false clashes
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc/def")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc@def")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc_def")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif(undefined, "abc def")));
|
||||
// this one is fine
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc_ def")));
|
||||
});
|
||||
(0, ava_1.default)("validateUniqueCategory for automation details id and tool name", (t) => {
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc", "abc")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc", "abc")));
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc_", "def")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc_", "def")));
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("ghi", "_jkl")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("ghi", "_jkl")));
|
||||
// Our category sanitization is not perfect. Here are some examples
|
||||
// of where we see false clashes
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc", "_")));
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("abc", "def__")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("abc_def")));
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(createMockSarif("mno_", "pqr")));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(createMockSarif("mno", "_pqr")));
|
||||
});
|
||||
(0, ava_1.default)("validateUniqueCategory for multiple runs", (t) => {
|
||||
const sarif1 = createMockSarif("abc", "def");
|
||||
const sarif2 = createMockSarif("ghi", "jkl");
|
||||
// duplicate categories are allowed within the same sarif file
|
||||
const multiSarif = { runs: [sarif1.runs[0], sarif1.runs[0], sarif2.runs[0]] };
|
||||
t.notThrows(() => uploadLib.validateUniqueCategory(multiSarif));
|
||||
// should throw if there are duplicate categories in separate validations
|
||||
t.throws(() => uploadLib.validateUniqueCategory(sarif1));
|
||||
t.throws(() => uploadLib.validateUniqueCategory(sarif2));
|
||||
});
|
||||
function createMockSarif(id, tool) {
|
||||
return {
|
||||
runs: [
|
||||
{
|
||||
automationDetails: {
|
||||
id,
|
||||
},
|
||||
tool: {
|
||||
driver: {
|
||||
name: tool,
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
//# sourceMappingURL=upload-lib.test.js.map
|
||||
File diff suppressed because one or more lines are too long
25
lib/util.js
generated
25
lib/util.js
generated
@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
exports.checkNotWindows11 = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
@@ -42,9 +42,13 @@ const BROKEN_VERSIONS = ["0.0.0-20211207"];
*/
exports.GITHUB_DOTCOM_URL = "https://github.com";
/**
* Name of the debugging artifact.
* Default name of the debugging artifact.
*/
exports.DEBUG_ARTIFACT_NAME = "debug-artifacts";
exports.DEFAULT_DEBUG_ARTIFACT_NAME = "debug-artifacts";
/**
* Default name of the database in the debugging artifact.
*/
exports.DEFAULT_DEBUG_DATABASE_NAME = "db";
/**
* Get the extra options for the codeql commands.
*/
@@ -68,8 +72,7 @@ exports.getExtraOptionsEnvParam = getExtraOptionsEnvParam;
*
* Returns an array of unique string tool names.
*/
function getToolNames(sarifContents) {
const sarif = JSON.parse(sarifContents);
function getToolNames(sarif) {
const toolNames = {};
for (const run of sarif.runs || []) {
const tool = run.tool || {};
@@ -492,9 +495,9 @@ async function codeQlVersionAbove(codeql, requiredVersion) {
}
exports.codeQlVersionAbove = codeQlVersionAbove;
// Create a bundle for the given DB, if it doesn't already exist
async function bundleDb(config, language, codeql) {
async function bundleDb(config, language, codeql, dbName) {
const databasePath = getCodeQLDatabasePath(config, language);
const databaseBundlePath = path.resolve(config.dbLocation, `${databasePath}.zip`);
const databaseBundlePath = path.resolve(config.dbLocation, `${dbName}.zip`);
// For a tiny bit of added safety, delete the file if it exists.
// The file is probably from an earlier call to this function, either
// as part of this action step or a previous one, but it could also be
@@ -503,7 +506,7 @@ async function bundleDb(config, language, codeql) {
if (fs.existsSync(databaseBundlePath)) {
await (0, del_1.default)(databaseBundlePath, { force: true });
}
await codeql.databaseBundle(databasePath, databaseBundlePath);
await codeql.databaseBundle(databasePath, databaseBundlePath, dbName);
return databaseBundlePath;
}
exports.bundleDb = bundleDb;
@@ -515,4 +518,10 @@ function isGoodVersion(versionSpec) {
return !BROKEN_VERSIONS.includes(versionSpec);
}
exports.isGoodVersion = isGoodVersion;
function checkNotWindows11() {
if (os.platform() === "win32" && semver.gte(os.release(), "10.0.20348")) {
throw new Error("Tracing builds with CodeQL is currently not supported on Windows 11 and Windows Server 2022. Please modify your Actions workflow to use an earlier version of Windows for this job, for example by setting `runs-on: windows-2019`.");
}
}
exports.checkNotWindows11 = checkNotWindows11;
//# sourceMappingURL=util.js.map
File diff suppressed because one or more lines are too long
2
lib/util.test.js
generated
2
lib/util.test.js
generated
@@ -35,7 +35,7 @@ const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default);
(0, ava_1.default)("getToolNames", (t) => {
const input = fs.readFileSync(`${__dirname}/../src/testdata/tool-names.sarif`, "utf8");
const toolNames = util.getToolNames(input);
const toolNames = util.getToolNames(JSON.parse(input));
t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
});
(0, ava_1.default)("getMemoryFlag() should return the correct --ram flag", (t) => {
File diff suppressed because one or more lines are too long
2
node_modules/.package-lock.json
generated
vendored
2
node_modules/.package-lock.json
generated
vendored
@@ -1,6 +1,6 @@
{
"name": "codeql",
"version": "1.0.26",
"version": "1.0.29",
"lockfileVersion": 2,
"requires": true,
"packages": {
4
package-lock.json
generated
4
package-lock.json
generated
@@ -1,12 +1,12 @@
{
"name": "codeql",
"version": "1.0.26",
"version": "1.0.29",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "codeql",
"version": "1.0.26",
"version": "1.0.29",
"license": "MIT",
"dependencies": {
"@actions/artifact": "^0.5.2",
@@ -1,6 +1,6 @@
{
"name": "codeql",
"version": "1.0.26",
"version": "1.0.29",
"private": true,
"description": "CodeQL action",
"scripts": {
@@ -2,12 +2,12 @@ name: "Debug artifact upload"
description: "Checks that debugging artifacts are correctly uploaded"
os: ["ubuntu-latest", "macos-latest"]
steps:
- name: Initialize dotnet
run: dotnet restore
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
debug: true
debug-artifact-name: "my-debug-artifacts"
debug-database-name: "my-db"
- name: Build code
shell: bash
run: ./build.sh
@@ -15,7 +15,7 @@ steps:
id: analysis
- uses: actions/download-artifact@v2
with:
name: debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
name: my-debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
- shell: bash
run: |
LANGUAGES="cpp csharp go java javascript python"
@@ -25,7 +25,7 @@ steps:
echo "Missing a SARIF file for $language"
exit 1
fi
if [[ ! -f "$language.zip" ]] ; then
if [[ ! -f "my-db-$language.zip" ]] ; then
echo "Missing a database bundle for $language"
exit 1
fi
@@ -1,8 +1,6 @@
|
||||
name: "Go: Custom queries"
|
||||
description: "Checks that Go works in conjunction with a config file specifying custom queries"
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- uses: actions/setup-go@v2
|
||||
with:
|
||||
go-version: "^1.13.1"
|
||||
|
||||
@@ -2,8 +2,6 @@ name: "Multi-language repository"
|
||||
description: "An end-to-end integration test of a multi-language repository using automatic language detection"
|
||||
os: ["ubuntu-latest", "macos-latest"]
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
db-location: "${{ runner.temp }}/customDbLocation"
|
||||
|
||||
@@ -3,8 +3,6 @@ description: "Checks that specifying packages using a combination of a config fi
|
||||
versions: ["nightly-20210831"] # This CLI version is known to work with package used in this test
|
||||
os: ["ubuntu-latest", "macos-latest"]
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: ".github/codeql/codeql-config-packaging3.yml"
|
||||
|
||||
@@ -3,8 +3,6 @@ description: "Checks that specifying packages using only a config file works"
|
||||
versions: ["nightly-20210831"] # This CLI version is known to work with package used in this test
|
||||
os: ["ubuntu-latest", "macos-latest"]
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: ".github/codeql/codeql-config-packaging.yml"
|
||||
|
||||
@@ -3,8 +3,6 @@ description: "Checks that specifying packages using the input to the Action work
|
||||
versions: ["nightly-20210831"] # This CLI version is known to work with package used in this test
|
||||
os: ["ubuntu-latest", "macos-latest"]
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: ".github/codeql/codeql-config-packaging2.yml"
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
name: "Remote config file"
|
||||
description: "Checks that specifying packages using only a config file works"
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
@@ -3,8 +3,6 @@ description: "Tests a split-up workflow in which we first build a database and l
|
||||
versions: ["nightly-20210831"] # This CLI version is known to work with package used in this test
|
||||
os: ["ubuntu-latest", "macos-latest"]
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: ".github/codeql/codeql-config-packaging3.yml"
|
||||
|
||||
@@ -3,8 +3,6 @@ description: "Tests using a CodeQL bundle from a local file rather than a URL"
|
||||
versions: ["nightly-latest"]
|
||||
os: ["ubuntu-latest"]
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- name: Fetch a CodeQL bundle
|
||||
shell: bash
|
||||
env:
|
||||
|
||||
@@ -2,8 +2,6 @@ name: "Test unsetting environment variables"
|
||||
description: "An end-to-end integration test that unsets some environment variables"
|
||||
os: ["ubuntu-latest"]
|
||||
steps:
|
||||
- name: Initialize dotnet
|
||||
run: dotnet restore
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
db-location: "${{ runner.temp }}/customDbLocation"
|
||||
|
||||
@@ -5,8 +5,8 @@ py -3 -m pip install --user --upgrade pip setuptools wheel
|
||||
|
||||
# virtualenv is a bit nicer for setting up virtual environment, since it will provide up-to-date versions of
|
||||
# pip/setuptools/wheel which basic `python3 -m venv venv` won't
|
||||
py -2 -m pip install --user virtualenv
|
||||
py -3 -m pip install --user virtualenv
|
||||
py -2 -m pip install --user 'virtualenv<20.11'
|
||||
py -3 -m pip install --user 'virtualenv<20.11'
|
||||
|
||||
# poetry 1.0.10 has error (https://github.com/python-poetry/poetry/issues/2711)
|
||||
py -3 -m pip install --user poetry!=1.0.10
|
||||
|
||||
@@ -15,7 +15,7 @@ python3 -m pip install --user --upgrade pip setuptools wheel
|
||||
|
||||
# virtualenv is a bit nicer for setting up virtual environment, since it will provide up-to-date versions of
|
||||
# pip/setuptools/wheel which basic `python3 -m venv venv` won't
|
||||
python3 -m pip install --user virtualenv
|
||||
python3 -m pip install --user 'virtualenv<20.11'
|
||||
|
||||
# We install poetry with pip instead of the recommended way, since the recommended way
|
||||
# caused some problem since `poetry run` gives output like:
|
||||
@@ -35,5 +35,5 @@ if command -v python2 &> /dev/null; then
|
||||
|
||||
python2 -m pip install --user --upgrade pip setuptools wheel
|
||||
|
||||
python2 -m pip install --user virtualenv
|
||||
python2 -m pip install --user 'virtualenv<20.11'
|
||||
fi
|
||||
|
||||
2
runner/package-lock.json
generated
2
runner/package-lock.json
generated
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "codeql-runner",
|
||||
"version": "1.0.26",
|
||||
"version": "1.0.29",
|
||||
"lockfileVersion": 1,
|
||||
"requires": true,
|
||||
"dependencies": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "codeql-runner",
|
||||
"version": "1.0.26",
|
||||
"version": "1.0.29",
|
||||
"private": true,
|
||||
"description": "CodeQL runner",
|
||||
"scripts": {
|
||||
|
||||
@@ -23,6 +23,8 @@ test("emptyPaths", async (t) => {
|
||||
dbLocation: path.resolve(tmpDir, "codeql_databases"),
|
||||
packs: {},
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
||||
@@ -46,6 +48,8 @@ test("nonEmptyPaths", async (t) => {
|
||||
dbLocation: path.resolve(tmpDir, "codeql_databases"),
|
||||
packs: {},
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
|
||||
@@ -73,6 +77,8 @@ test("exclude temp dir", async (t) => {
|
||||
dbLocation: path.resolve(tempDir, "codeql_databases"),
|
||||
packs: {},
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
||||
|
||||
@@ -29,14 +29,14 @@ export function printPathFiltersWarning(
|
||||
config: configUtils.Config,
|
||||
logger: Logger
|
||||
) {
|
||||
// Index include/exclude/filters only work in javascript and python.
|
||||
// Index include/exclude/filters only work in javascript/python/ruby.
|
||||
// If any other languages are detected/configured then show a warning.
|
||||
if (
|
||||
(config.paths.length !== 0 || config.pathsIgnore.length !== 0) &&
|
||||
!config.languages.every(isInterpretedLanguage)
|
||||
) {
|
||||
logger.warning(
|
||||
'The "paths"/"paths-ignore" fields of the config only have effect for JavaScript and Python'
|
||||
'The "paths"/"paths-ignore" fields of the config only have effect for JavaScript, Python, and Ruby'
|
||||
);
|
||||
}
|
||||
}
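A minimal sketch of when this warning now fires, using assumed example values and a local stand-in for the `isInterpretedLanguage` helper referenced above (the stand-in simply mirrors the javascript/python/ruby list from the updated comment):

```typescript
// Illustrative only; not the action's code. Stand-in for the real helper:
const isInterpretedLanguage = (lang: string) =>
  ["javascript", "python", "ruby"].includes(lang);

// Assumed example config values:
const paths = ["src"];
const pathsIgnore: string[] = [];
const languages = ["javascript", "cpp"];

const shouldWarn =
  (paths.length !== 0 || pathsIgnore.length !== 0) &&
  !languages.every(isInterpretedLanguage);
// shouldWarn === true here, because "cpp" is not an interpreted language.
```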
|
||||
|
||||
@@ -4,7 +4,11 @@ import * as sinon from "sinon";
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import * as analyze from "./analyze";
|
||||
import * as configUtils from "./config-utils";
|
||||
import { setupTests, setupActionsVars } from "./testing-utils";
|
||||
import {
|
||||
setupTests,
|
||||
setupActionsVars,
|
||||
mockFeatureFlagApiEndpoint,
|
||||
} from "./testing-utils";
|
||||
import * as util from "./util";
|
||||
|
||||
setupTests(test);
|
||||
@@ -25,6 +29,7 @@ test("analyze action with RAM & threads from environment variables", async (t) =
|
||||
.resolves({} as actionsUtil.StatusReportBase);
|
||||
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
|
||||
sinon.stub(configUtils, "getConfig").resolves({
|
||||
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
|
||||
languages: [],
|
||||
} as unknown as configUtils.Config);
|
||||
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
|
||||
@@ -33,6 +38,7 @@ test("analyze action with RAM & threads from environment variables", async (t) =
|
||||
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
||||
optionalInputStub.withArgs("cleanup-level").returns("none");
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
mockFeatureFlagApiEndpoint(200, {});
|
||||
|
||||
// When there are no action inputs for RAM and threads, the action uses
|
||||
// environment variables (passed down from the init action) to set RAM and
|
||||
|
||||
@@ -4,7 +4,11 @@ import * as sinon from "sinon";
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import * as analyze from "./analyze";
|
||||
import * as configUtils from "./config-utils";
|
||||
import { setupTests, setupActionsVars } from "./testing-utils";
|
||||
import {
|
||||
setupTests,
|
||||
setupActionsVars,
|
||||
mockFeatureFlagApiEndpoint,
|
||||
} from "./testing-utils";
|
||||
import * as util from "./util";
|
||||
|
||||
setupTests(test);
|
||||
@@ -25,6 +29,7 @@ test("analyze action with RAM & threads from action inputs", async (t) => {
|
||||
.resolves({} as actionsUtil.StatusReportBase);
|
||||
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
|
||||
sinon.stub(configUtils, "getConfig").resolves({
|
||||
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
|
||||
languages: [],
|
||||
} as unknown as configUtils.Config);
|
||||
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
|
||||
@@ -33,6 +38,7 @@ test("analyze action with RAM & threads from action inputs", async (t) => {
|
||||
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
||||
optionalInputStub.withArgs("cleanup-level").returns("none");
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
mockFeatureFlagApiEndpoint(200, {});
|
||||
|
||||
process.env["CODEQL_THREADS"] = "1";
|
||||
process.env["CODEQL_RAM"] = "4992";
|
||||
|
||||
@@ -15,12 +15,13 @@ import {
|
||||
import { CODEQL_VERSION_NEW_TRACING, getCodeQL } from "./codeql";
|
||||
import { Config, getConfig } from "./config-utils";
|
||||
import { uploadDatabases } from "./database-upload";
|
||||
import { GitHubFeatureFlags } from "./feature-flags";
|
||||
import { getActionsLogger } from "./logging";
|
||||
import { parseRepositoryNwo } from "./repository";
|
||||
import * as upload_lib from "./upload-lib";
|
||||
import { UploadResult } from "./upload-lib";
|
||||
import * as util from "./util";
|
||||
import { bundleDb, codeQlVersionAbove, DEBUG_ARTIFACT_NAME } from "./util";
|
||||
import { bundleDb, codeQlVersionAbove } from "./util";
|
||||
|
||||
// eslint-disable-next-line import/no-commonjs
|
||||
const pkg = require("../package.json");
|
||||
@@ -99,6 +100,23 @@ async function run() {
|
||||
const memory = util.getMemoryFlag(
|
||||
actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"]
|
||||
);
|
||||
|
||||
const repositoryNwo = parseRepositoryNwo(
|
||||
util.getRequiredEnvParam("GITHUB_REPOSITORY")
|
||||
);
|
||||
|
||||
const featureFlags = new GitHubFeatureFlags(
|
||||
config.gitHubVersion,
|
||||
apiDetails,
|
||||
repositoryNwo,
|
||||
logger
|
||||
);
|
||||
// We currently perform an API request in both the `init` and `analyze` Actions to determine
|
||||
// what feature flags are enabled. At the time of writing, this redundant API call is acceptable
|
||||
// to us, but if we wanted to avoid it, we could do so by serializing the feature flags as part
|
||||
// of the config file.
|
||||
void featureFlags.preloadFeatureFlags();
|
||||
|
||||
await runFinalize(outputDir, threads, memory, config, logger);
|
||||
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
|
||||
runStats = await runQueries(
|
||||
@@ -117,7 +135,8 @@ async function run() {
|
||||
config.languages.map((lang) =>
|
||||
path.resolve(outputDir, `${lang}.sarif`)
|
||||
),
|
||||
outputDir
|
||||
outputDir,
|
||||
config.debugArtifactName
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -138,12 +157,17 @@ async function run() {
|
||||
// Multilanguage tracing: there are additional logs in the root of the cluster
|
||||
toUpload.push(...listFolder(path.resolve(config.dbLocation, "log")));
|
||||
}
|
||||
await uploadDebugArtifacts(toUpload, config.dbLocation);
|
||||
await uploadDebugArtifacts(
|
||||
toUpload,
|
||||
config.dbLocation,
|
||||
config.debugArtifactName
|
||||
);
|
||||
if (!(await codeQlVersionAbove(codeql, CODEQL_VERSION_NEW_TRACING))) {
|
||||
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
|
||||
await uploadDebugArtifacts(
|
||||
[path.resolve(config.tempDir, "compound-build-tracer.log")],
|
||||
config.tempDir
|
||||
config.tempDir,
|
||||
config.debugArtifactName
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -173,10 +197,14 @@ async function run() {
|
||||
logger.info("Not uploading results");
|
||||
}
|
||||
|
||||
const repositoryNwo = parseRepositoryNwo(
|
||||
util.getRequiredEnvParam("GITHUB_REPOSITORY")
|
||||
// Possibly upload the database bundles for remote queries
|
||||
await uploadDatabases(
|
||||
repositoryNwo,
|
||||
config,
|
||||
featureFlags,
|
||||
apiDetails,
|
||||
logger
|
||||
);
|
||||
await uploadDatabases(repositoryNwo, config, apiDetails, logger); // Possibly upload the database bundles for remote queries
|
||||
|
||||
if (
|
||||
uploadResult !== undefined &&
|
||||
@@ -189,14 +217,6 @@ async function run() {
|
||||
getActionsLogger()
|
||||
);
|
||||
}
|
||||
|
||||
if (config.debugMode) {
|
||||
// Upload the database bundles as an Actions artifact for debugging
|
||||
const toUpload: string[] = [];
|
||||
for (const language of config.languages)
|
||||
toUpload.push(await bundleDb(config, language, codeql));
|
||||
await uploadDebugArtifacts(toUpload, config.dbLocation);
|
||||
}
|
||||
} catch (origError) {
|
||||
const error =
|
||||
origError instanceof Error ? origError : new Error(String(origError));
|
||||
@@ -212,6 +232,30 @@ async function run() {
|
||||
|
||||
return;
|
||||
} finally {
|
||||
if (config !== undefined && config.debugMode) {
|
||||
try {
|
||||
// Upload the database bundles as an Actions artifact for debugging
|
||||
const toUpload: string[] = [];
|
||||
for (const language of config.languages) {
|
||||
toUpload.push(
|
||||
await bundleDb(
|
||||
config,
|
||||
language,
|
||||
await getCodeQL(config.codeQLCmd),
|
||||
`${config.debugDatabaseName}-${language}`
|
||||
)
|
||||
);
|
||||
}
|
||||
await uploadDebugArtifacts(
|
||||
toUpload,
|
||||
config.dbLocation,
|
||||
config.debugArtifactName
|
||||
);
|
||||
} catch (error) {
|
||||
console.log(`Failed to upload database debug bundles: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (core.isDebug() && config !== undefined) {
|
||||
core.info("Debug mode is on. Printing CodeQL debug logs...");
|
||||
for (const language of config.languages) {
|
||||
@@ -251,7 +295,11 @@ async function run() {
|
||||
}
|
||||
}
|
||||
|
||||
async function uploadDebugArtifacts(toUpload: string[], rootDir: string) {
|
||||
async function uploadDebugArtifacts(
|
||||
toUpload: string[],
|
||||
rootDir: string,
|
||||
artifactName: string
|
||||
) {
|
||||
let suffix = "";
|
||||
const matrix = actionsUtil.getRequiredInput("matrix");
|
||||
if (matrix !== undefined && matrix !== "null") {
|
||||
@@ -259,7 +307,7 @@ async function uploadDebugArtifacts(toUpload: string[], rootDir: string) {
|
||||
suffix += `-${entry[1]}`;
|
||||
}
|
||||
await artifact.create().uploadArtifact(
|
||||
actionsUtil.sanitizeArifactName(`${DEBUG_ARTIFACT_NAME}${suffix}`),
|
||||
actionsUtil.sanitizeArifactName(`${artifactName}${suffix}`),
|
||||
toUpload.map((file) => path.normalize(file)),
|
||||
path.normalize(rootDir)
|
||||
);
|
||||
|
||||
@@ -120,6 +120,8 @@ test("status report fields and search path setting", async (t) => {
|
||||
dbLocation: path.resolve(tmpDir, "codeql_databases"),
|
||||
packs,
|
||||
debugMode: false,
|
||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
};
|
||||
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
|
||||
recursive: true,
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"maximumVersion": "3.3", "minimumVersion": "3.0"}
|
||||
{"maximumVersion": "3.4", "minimumVersion": "3.0"}
|
||||
|
||||
@@ -127,7 +127,11 @@ export interface CodeQL {
|
||||
/**
|
||||
* Run 'codeql database bundle'.
|
||||
*/
|
||||
databaseBundle(databasePath: string, outputFilePath: string): Promise<void>;
|
||||
databaseBundle(
|
||||
databasePath: string,
|
||||
outputFilePath: string,
|
||||
dbName: string
|
||||
): Promise<void>;
|
||||
/**
|
||||
* Run 'codeql database run-queries'.
|
||||
*/
|
||||
@@ -215,6 +219,7 @@ const CODEQL_VERSION_GROUP_RULES = "2.5.5";
|
||||
const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
|
||||
export const CODEQL_VERSION_COUNTS_LINES = "2.6.2";
|
||||
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
|
||||
export const CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
|
||||
|
||||
/**
|
||||
* This variable controls using the new style of tracing from the CodeQL
|
||||
@@ -956,13 +961,15 @@ async function getCodeQLForCmd(
|
||||
},
|
||||
async databaseBundle(
|
||||
databasePath: string,
|
||||
outputFilePath: string
|
||||
outputFilePath: string,
|
||||
databaseName: string
|
||||
): Promise<void> {
|
||||
const args = [
|
||||
"database",
|
||||
"bundle",
|
||||
databasePath,
|
||||
`--output=${outputFilePath}`,
|
||||
`--name=${databaseName}`,
|
||||
];
|
||||
await new toolrunner.ToolRunner(cmd, args).exec();
|
||||
},
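For illustration, the argument list the updated `databaseBundle` would build when bundling a JavaScript database under the name `my-db-javascript` (the paths are assumed example values; "my-db" is the debug database name used in the config tests below):

```typescript
// Sketch of the resulting CLI arguments (example paths, not literal output):
const args = [
  "database",
  "bundle",
  "/tmp/codeql_databases/javascript", // databasePath
  "--output=/tmp/codeql_databases/my-db-javascript.zip", // outputFilePath
  "--name=my-db-javascript", // databaseName (the new parameter)
];
```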
|
||||
|
||||
@@ -9,6 +9,7 @@ import * as sinon from "sinon";
|
||||
import * as api from "./api-client";
|
||||
import { getCachedCodeQL, setCodeQL } from "./codeql";
|
||||
import * as configUtils from "./config-utils";
|
||||
import { createFeatureFlags, FeatureFlag } from "./feature-flags";
|
||||
import { Language } from "./languages";
|
||||
import { getRunnerLogger } from "./logging";
|
||||
import { setupTests } from "./testing-utils";
|
||||
@@ -86,6 +87,8 @@ test("load empty config", async (t) => {
|
||||
undefined,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -93,6 +96,7 @@ test("load empty config", async (t) => {
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
logger
|
||||
);
|
||||
|
||||
@@ -104,6 +108,8 @@ test("load empty config", async (t) => {
|
||||
undefined,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -111,6 +117,7 @@ test("load empty config", async (t) => {
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
logger
|
||||
)
|
||||
);
|
||||
@@ -147,6 +154,8 @@ test("loading config saves config", async (t) => {
|
||||
undefined,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -154,6 +163,7 @@ test("loading config saves config", async (t) => {
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
logger
|
||||
);
|
||||
|
||||
@@ -176,6 +186,8 @@ test("load input outside of workspace", async (t) => {
|
||||
"../input",
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -183,6 +195,7 @@ test("load input outside of workspace", async (t) => {
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
throw new Error("initConfig did not throw error");
|
||||
@@ -212,6 +225,8 @@ test("load non-local input with invalid repo syntax", async (t) => {
|
||||
configFile,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -219,6 +234,7 @@ test("load non-local input with invalid repo syntax", async (t) => {
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
throw new Error("initConfig did not throw error");
|
||||
@@ -249,6 +265,8 @@ test("load non-existent input", async (t) => {
|
||||
configFile,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -256,6 +274,7 @@ test("load non-existent input", async (t) => {
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
throw new Error("initConfig did not throw error");
|
||||
@@ -333,6 +352,8 @@ test("load non-empty input", async (t) => {
|
||||
dbLocation: path.resolve(tmpDir, "codeql_databases"),
|
||||
packs: {} as configUtils.Packs,
|
||||
debugMode: false,
|
||||
debugArtifactName: "my-artifact",
|
||||
debugDatabaseName: "my-db",
|
||||
};
|
||||
|
||||
const languages = "javascript";
|
||||
@@ -345,6 +366,8 @@ test("load non-empty input", async (t) => {
|
||||
configFilePath,
|
||||
undefined,
|
||||
false,
|
||||
"my-artifact",
|
||||
"my-db",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -352,6 +375,7 @@ test("load non-empty input", async (t) => {
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
|
||||
@@ -409,6 +433,8 @@ test("Default queries are used", async (t) => {
|
||||
configFilePath,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -416,6 +442,7 @@ test("Default queries are used", async (t) => {
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
|
||||
@@ -481,6 +508,8 @@ test("Queries can be specified in config file", async (t) => {
|
||||
configFilePath,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -488,6 +517,7 @@ test("Queries can be specified in config file", async (t) => {
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
|
||||
@@ -547,6 +577,8 @@ test("Queries from config file can be overridden in workflow file", async (t) =>
|
||||
configFilePath,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -554,6 +586,7 @@ test("Queries from config file can be overridden in workflow file", async (t) =>
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
|
||||
@@ -611,6 +644,8 @@ test("Queries in workflow file can be used in tandem with the 'disable default q
|
||||
configFilePath,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -618,6 +653,7 @@ test("Queries in workflow file can be used in tandem with the 'disable default q
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
|
||||
@@ -668,6 +704,8 @@ test("Multiple queries can be specified in workflow file, no config file require
|
||||
undefined,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -675,6 +713,7 @@ test("Multiple queries can be specified in workflow file, no config file require
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
|
||||
@@ -746,6 +785,8 @@ test("Queries in workflow file can be added to the set of queries without overri
|
||||
configFilePath,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -753,6 +794,7 @@ test("Queries in workflow file can be added to the set of queries without overri
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
|
||||
@@ -814,6 +856,8 @@ test("Invalid queries in workflow file handled correctly", async (t) => {
|
||||
undefined,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -821,6 +865,7 @@ test("Invalid queries in workflow file handled correctly", async (t) => {
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
t.fail("initConfig did not throw error");
|
||||
@@ -879,6 +924,8 @@ test("API client used when reading remote config", async (t) => {
|
||||
configFile,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -886,6 +933,7 @@ test("API client used when reading remote config", async (t) => {
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
t.assert(spyGetContents.called);
|
||||
@@ -906,6 +954,8 @@ test("Remote config handles the case where a directory is provided", async (t) =
|
||||
repoReference,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -913,6 +963,7 @@ test("Remote config handles the case where a directory is provided", async (t) =
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
throw new Error("initConfig did not throw error");
|
||||
@@ -941,6 +992,8 @@ test("Invalid format of remote config handled correctly", async (t) => {
|
||||
repoReference,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -948,6 +1001,7 @@ test("Invalid format of remote config handled correctly", async (t) => {
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
throw new Error("initConfig did not throw error");
|
||||
@@ -977,6 +1031,8 @@ test("No detected languages", async (t) => {
|
||||
undefined,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -984,6 +1040,7 @@ test("No detected languages", async (t) => {
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
throw new Error("initConfig did not throw error");
|
||||
@@ -1005,6 +1062,8 @@ test("Unknown languages", async (t) => {
|
||||
undefined,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -1012,6 +1071,7 @@ test("Unknown languages", async (t) => {
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
throw new Error("initConfig did not throw error");
|
||||
@@ -1055,6 +1115,8 @@ test("Config specifies packages", async (t) => {
|
||||
configFile,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -1062,6 +1124,7 @@ test("Config specifies packages", async (t) => {
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
t.deepEqual(packs as unknown, {
|
||||
@@ -1114,6 +1177,8 @@ test("Config specifies packages for multiple languages", async (t) => {
|
||||
configFile,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example" },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -1121,6 +1186,7 @@ test("Config specifies packages for multiple languages", async (t) => {
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
t.deepEqual(packs as unknown, {
|
||||
@@ -1189,6 +1255,8 @@ function doInvalidInputTest(
|
||||
configFile,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
@@ -1196,6 +1264,7 @@ function doInvalidInputTest(
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags([]),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
throw new Error("initConfig did not throw error");
|
||||
@@ -1631,5 +1700,88 @@ test(
|
||||
/"xxx" is not a valid pack/
|
||||
);
|
||||
|
||||
// errors
|
||||
// input w invalid pack name
|
||||
async function mlPoweredQueriesMacro(
|
||||
t: ExecutionContext,
|
||||
codeQLVersion: string,
|
||||
isMlPoweredQueriesFlagEnabled: boolean,
|
||||
queriesInput: string | undefined,
|
||||
shouldRunMlPoweredQueries: boolean
|
||||
) {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
const codeQL = setCodeQL({
|
||||
async getVersion() {
|
||||
return codeQLVersion;
|
||||
},
|
||||
async resolveQueries() {
|
||||
return {
|
||||
byLanguage: {
|
||||
javascript: { "fake-query.ql": {} },
|
||||
},
|
||||
noDeclaredLanguage: {},
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
const { packs } = await configUtils.initConfig(
|
||||
"javascript",
|
||||
queriesInput,
|
||||
undefined,
|
||||
undefined,
|
||||
undefined,
|
||||
false,
|
||||
"",
|
||||
"",
|
||||
{ owner: "github", repo: "example " },
|
||||
tmpDir,
|
||||
tmpDir,
|
||||
codeQL,
|
||||
tmpDir,
|
||||
gitHubVersion,
|
||||
sampleApiDetails,
|
||||
createFeatureFlags(
|
||||
isMlPoweredQueriesFlagEnabled
|
||||
? [FeatureFlag.MlPoweredQueriesEnabled]
|
||||
: []
|
||||
),
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
if (shouldRunMlPoweredQueries) {
|
||||
t.deepEqual(packs as unknown, {
|
||||
[Language.javascript]: [
|
||||
{
|
||||
packName: "codeql/javascript-experimental-atm-queries",
|
||||
version: "~0.0.2",
|
||||
},
|
||||
],
|
||||
});
|
||||
} else {
|
||||
t.deepEqual(packs as unknown, {});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
mlPoweredQueriesMacro.title = (
|
||||
_providedTitle: string,
|
||||
codeQLVersion: string,
|
||||
isMlPoweredQueriesFlagEnabled: boolean,
|
||||
queriesInput: string | undefined,
|
||||
shouldRunMlPoweredQueries: boolean
|
||||
) => {
|
||||
const queriesInputDescription = queriesInput
|
||||
? `'queries: ${queriesInput}'`
|
||||
: "default config";
|
||||
|
||||
return `ML-powered queries ${
|
||||
shouldRunMlPoweredQueries ? "are" : "aren't"
|
||||
} loaded for ${queriesInputDescription} using CLI v${codeQLVersion} when feature flag is ${
|
||||
isMlPoweredQueriesFlagEnabled ? "enabled" : "disabled"
|
||||
}`;
|
||||
};
|
||||
|
||||
// macro, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries
|
||||
test(mlPoweredQueriesMacro, "2.7.4", true, "security-extended", false);
|
||||
test(mlPoweredQueriesMacro, "2.7.5", false, "security-extended", false);
|
||||
test(mlPoweredQueriesMacro, "2.7.5", true, undefined, false);
|
||||
test(mlPoweredQueriesMacro, "2.7.5", true, "security-extended", true);
|
||||
test(mlPoweredQueriesMacro, "2.7.5", true, "security-and-quality", true);
|
||||
|
||||
@@ -5,12 +5,17 @@ import * as yaml from "js-yaml";
|
||||
import * as semver from "semver";
|
||||
|
||||
import * as api from "./api-client";
|
||||
import { CodeQL, ResolveQueriesOutput } from "./codeql";
|
||||
import {
|
||||
CodeQL,
|
||||
CODEQL_VERSION_ML_POWERED_QUERIES,
|
||||
ResolveQueriesOutput,
|
||||
} from "./codeql";
|
||||
import * as externalQueries from "./external-queries";
|
||||
import { FeatureFlag, FeatureFlags } from "./feature-flags";
|
||||
import { Language, parseLanguage } from "./languages";
|
||||
import { Logger } from "./logging";
|
||||
import { RepositoryNwo } from "./repository";
|
||||
import { GitHubVersion } from "./util";
|
||||
import { codeQlVersionAbove, GitHubVersion } from "./util";
|
||||
|
||||
// Property names from the user-supplied config file.
|
||||
const NAME_PROPERTY = "name";
|
||||
@@ -130,6 +135,14 @@ export interface Config {
|
||||
* output for debugging purposes when possible.
|
||||
*/
|
||||
debugMode: boolean;
|
||||
/**
|
||||
* Specifies the name of the debugging artifact if we are in debug mode.
|
||||
*/
|
||||
debugArtifactName: string;
|
||||
/**
|
||||
* Specifies the name of the database in the debugging artifact.
|
||||
*/
|
||||
debugDatabaseName: string;
|
||||
}
|
||||
|
||||
export type Packs = Partial<Record<Language, PackWithVersion[]>>;
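A hypothetical sketch (this helper does not exist in the diff) of how the two new fields could be filled in, falling back to the `DEFAULT_DEBUG_ARTIFACT_NAME` and `DEFAULT_DEBUG_DATABASE_NAME` constants that the tests in this diff import from `./util`:

```typescript
import {
  DEFAULT_DEBUG_ARTIFACT_NAME,
  DEFAULT_DEBUG_DATABASE_NAME,
} from "./util";

// Hypothetical helper, for illustration only: empty inputs fall back to the
// defaults exported from ./util.
function resolveDebugNames(artifactInput: string, databaseInput: string) {
  return {
    debugArtifactName: artifactInput || DEFAULT_DEBUG_ARTIFACT_NAME,
    debugDatabaseName: databaseInput || DEFAULT_DEBUG_DATABASE_NAME,
  };
}
```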
|
||||
@@ -262,7 +275,9 @@ async function addBuiltinSuiteQueries(
|
||||
languages: string[],
|
||||
codeQL: CodeQL,
|
||||
resultMap: Queries,
|
||||
packs: Packs,
|
||||
suiteName: string,
|
||||
featureFlags: FeatureFlags,
|
||||
configFile?: string
|
||||
) {
|
||||
const found = builtinSuites.find((suite) => suite === suiteName);
|
||||
@@ -270,6 +285,24 @@ async function addBuiltinSuiteQueries(
|
||||
throw new Error(getQueryUsesInvalid(configFile, suiteName));
|
||||
}
|
||||
|
||||
// If we're running the JavaScript security-extended analysis (or a superset of it) and the repo
|
||||
// is opted into the ML-powered queries beta, then add the ML-powered query pack so that we run
|
||||
// the ML-powered queries.
|
||||
if (
|
||||
languages.includes("javascript") &&
|
||||
(found === "security-extended" || found === "security-and-quality") &&
|
||||
(await featureFlags.getValue(FeatureFlag.MlPoweredQueriesEnabled)) &&
|
||||
(await codeQlVersionAbove(codeQL, CODEQL_VERSION_ML_POWERED_QUERIES))
|
||||
) {
|
||||
if (!packs.javascript) {
|
||||
packs.javascript = [];
|
||||
}
|
||||
packs.javascript.push({
|
||||
packName: "codeql/javascript-experimental-atm-queries",
|
||||
version: "~0.0.2",
|
||||
});
|
||||
}
|
||||
|
||||
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
|
||||
await runResolveQueries(codeQL, resultMap, suites, undefined);
|
||||
}
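When every condition above holds, the net effect is a single extra entry in `packs` for JavaScript; the shape below is the one asserted in the `config-utils` tests elsewhere in this diff:

```typescript
// Expected packs entry once the ML-powered queries gate passes
// (values copied from the test assertions in this diff):
const expectedPacks = {
  javascript: [
    {
      packName: "codeql/javascript-experimental-atm-queries",
      version: "~0.0.2",
    },
  ],
};
```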
|
||||
@@ -378,10 +411,12 @@ async function parseQueryUses(
|
||||
languages: string[],
|
||||
codeQL: CodeQL,
|
||||
resultMap: Queries,
|
||||
packs: Packs,
|
||||
queryUses: string,
|
||||
tempDir: string,
|
||||
workspacePath: string,
|
||||
apiDetails: api.GitHubApiExternalRepoDetails,
|
||||
featureFlags: FeatureFlags,
|
||||
logger: Logger,
|
||||
configFile?: string
|
||||
) {
|
||||
@@ -408,7 +443,9 @@ async function parseQueryUses(
|
||||
languages,
|
||||
codeQL,
|
||||
resultMap,
|
||||
packs,
|
||||
queryUses,
|
||||
featureFlags,
|
||||
configFile
|
||||
);
|
||||
return;
|
||||
@@ -770,14 +807,16 @@ async function getLanguages(
|
||||
return parsedLanguages;
|
||||
}
|
||||
|
||||
async function addQueriesFromWorkflow(
|
||||
async function addQueriesAndPacksFromWorkflow(
|
||||
codeQL: CodeQL,
|
||||
queriesInput: string,
|
||||
languages: string[],
|
||||
resultMap: Queries,
|
||||
packs: Packs,
|
||||
tempDir: string,
|
||||
workspacePath: string,
|
||||
apiDetails: api.GitHubApiExternalRepoDetails,
|
||||
featureFlags: FeatureFlags,
|
||||
logger: Logger
|
||||
) {
|
||||
queriesInput = queriesInput.trim();
|
||||
@@ -789,10 +828,12 @@ async function addQueriesFromWorkflow(
|
||||
languages,
|
||||
codeQL,
|
||||
resultMap,
|
||||
packs,
|
||||
query,
|
||||
tempDir,
|
||||
workspacePath,
|
||||
apiDetails,
|
||||
featureFlags,
|
||||
logger
|
||||
);
|
||||
}
|
||||
@@ -819,6 +860,8 @@ export async function getDefaultConfig(
|
||||
packsInput: string | undefined,
|
||||
dbLocation: string | undefined,
|
||||
debugMode: boolean,
|
||||
debugArtifactName: string,
|
||||
debugDatabaseName: string,
|
||||
repository: RepositoryNwo,
|
||||
tempDir: string,
|
||||
toolCacheDir: string,
|
||||
@@ -826,6 +869,7 @@ export async function getDefaultConfig(
|
||||
workspacePath: string,
|
||||
gitHubVersion: GitHubVersion,
|
||||
apiDetails: api.GitHubApiCombinedDetails,
|
||||
featureFlags: FeatureFlags,
|
||||
logger: Logger
|
||||
): Promise<Config> {
|
||||
const languages = await getLanguages(
|
||||
@@ -843,21 +887,22 @@ export async function getDefaultConfig(
|
||||
};
|
||||
}
|
||||
await addDefaultQueries(codeQL, languages, queries);
|
||||
const packs = parsePacksFromInput(packsInput, languages) ?? {};
|
||||
if (queriesInput) {
|
||||
await addQueriesFromWorkflow(
|
||||
await addQueriesAndPacksFromWorkflow(
|
||||
codeQL,
|
||||
queriesInput,
|
||||
languages,
|
||||
queries,
|
||||
packs,
|
||||
tempDir,
|
||||
workspacePath,
|
||||
apiDetails,
|
||||
featureFlags,
|
||||
logger
|
||||
);
|
||||
}
|
||||
|
||||
const packs = parsePacksFromInput(packsInput, languages) ?? {};
|
||||
|
||||
return {
|
||||
languages,
|
||||
queries,
|
||||
@@ -871,6 +916,8 @@ export async function getDefaultConfig(
|
||||
gitHubVersion,
|
||||
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
|
||||
debugMode,
|
||||
debugArtifactName,
|
||||
debugDatabaseName,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -884,6 +931,8 @@ async function loadConfig(
|
||||
configFile: string,
|
||||
dbLocation: string | undefined,
|
||||
debugMode: boolean,
|
||||
debugArtifactName: string,
|
||||
debugDatabaseName: string,
|
||||
repository: RepositoryNwo,
|
||||
tempDir: string,
|
||||
toolCacheDir: string,
|
||||
@@ -891,6 +940,7 @@ async function loadConfig(
|
||||
workspacePath: string,
|
||||
gitHubVersion: GitHubVersion,
|
||||
apiDetails: api.GitHubApiCombinedDetails,
|
||||
featureFlags: FeatureFlags,
|
||||
logger: Logger
|
||||
): Promise<Config> {
|
||||
let parsedYAML: UserConfig;
|
||||
@@ -943,19 +993,28 @@ async function loadConfig(
|
||||
await addDefaultQueries(codeQL, languages, queries);
|
||||
}
|
||||
|
||||
const packs = parsePacks(
|
||||
parsedYAML[PACKS_PROPERTY] ?? {},
|
||||
packsInput,
|
||||
languages,
|
||||
configFile
|
||||
);
|
||||
|
||||
// If queries were provided using `with` in the action configuration,
|
||||
// they should take precedence over the queries in the config file
|
||||
// unless they're prefixed with "+", in which case they supplement those
|
||||
// in the config file.
|
||||
if (queriesInput) {
|
||||
await addQueriesFromWorkflow(
|
||||
await addQueriesAndPacksFromWorkflow(
|
||||
codeQL,
|
||||
queriesInput,
|
||||
languages,
|
||||
queries,
|
||||
packs,
|
||||
tempDir,
|
||||
workspacePath,
|
||||
apiDetails,
|
||||
featureFlags,
|
||||
logger
|
||||
);
|
||||
}
|
||||
@@ -978,10 +1037,12 @@ async function loadConfig(
|
||||
languages,
|
||||
codeQL,
|
||||
queries,
|
||||
packs,
|
||||
query[QUERIES_USES_PROPERTY],
|
||||
tempDir,
|
||||
workspacePath,
|
||||
apiDetails,
|
||||
featureFlags,
|
||||
logger,
|
||||
configFile
|
||||
);
|
||||
@@ -1021,13 +1082,6 @@ async function loadConfig(
|
||||
}
|
||||
}
|
||||
|
||||
const packs = parsePacks(
|
||||
parsedYAML[PACKS_PROPERTY] ?? {},
|
||||
packsInput,
|
||||
languages,
|
||||
configFile
|
||||
);
|
||||
|
||||
return {
|
||||
languages,
|
||||
queries,
|
||||
@@ -1041,6 +1095,8 @@ async function loadConfig(
|
||||
gitHubVersion,
|
||||
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
|
||||
debugMode,
|
||||
debugArtifactName,
|
||||
debugDatabaseName,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1211,6 +1267,8 @@ export async function initConfig(
|
||||
configFile: string | undefined,
|
||||
dbLocation: string | undefined,
|
||||
debugMode: boolean,
|
||||
debugArtifactName: string,
|
||||
debugDatabaseName: string,
|
||||
repository: RepositoryNwo,
|
||||
tempDir: string,
|
||||
toolCacheDir: string,
|
||||
@@ -1218,6 +1276,7 @@ export async function initConfig(
|
||||
workspacePath: string,
|
||||
gitHubVersion: GitHubVersion,
|
||||
apiDetails: api.GitHubApiCombinedDetails,
|
||||
featureFlags: FeatureFlags,
|
||||
logger: Logger
|
||||
): Promise<Config> {
|
||||
let config: Config;
|
||||
@@ -1231,6 +1290,8 @@ export async function initConfig(
|
||||
packsInput,
|
||||
dbLocation,
|
||||
debugMode,
|
||||
debugArtifactName,
|
||||
debugDatabaseName,
|
||||
repository,
|
||||
tempDir,
|
||||
toolCacheDir,
|
||||
@@ -1238,6 +1299,7 @@ export async function initConfig(
|
||||
workspacePath,
|
||||
gitHubVersion,
|
||||
apiDetails,
|
||||
featureFlags,
|
||||
logger
|
||||
);
|
||||
} else {
|
||||
@@ -1248,6 +1310,8 @@ export async function initConfig(
|
||||
configFile,
|
||||
dbLocation,
|
||||
debugMode,
|
||||
debugArtifactName,
|
||||
debugDatabaseName,
|
||||
repository,
|
||||
tempDir,
|
||||
toolCacheDir,
|
||||
@@ -1255,6 +1319,7 @@ export async function initConfig(
|
||||
workspacePath,
|
||||
gitHubVersion,
|
||||
apiDetails,
|
||||
featureFlags,
|
||||
logger
|
||||
);
|
||||
}
|
||||
|
||||
@@ -10,11 +10,18 @@ import * as apiClient from "./api-client";
|
||||
import { setCodeQL } from "./codeql";
|
||||
import { Config } from "./config-utils";
|
||||
import { uploadDatabases } from "./database-upload";
|
||||
import { createFeatureFlags, FeatureFlag, FeatureFlags } from "./feature-flags";
|
||||
import { Language } from "./languages";
|
||||
import { Logger } from "./logging";
|
||||
import { RepositoryNwo } from "./repository";
|
||||
import { setupActionsVars, setupTests } from "./testing-utils";
|
||||
import {
|
||||
getRecordingLogger,
|
||||
LoggedMessage,
|
||||
setupActionsVars,
|
||||
setupTests,
|
||||
} from "./testing-utils";
|
||||
import {
|
||||
DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
DEFAULT_DEBUG_DATABASE_NAME,
|
||||
GitHubVariant,
|
||||
HTTPError,
|
||||
initializeEnvironment,
|
||||
@@ -28,6 +35,11 @@ test.beforeEach(() => {
|
||||
initializeEnvironment(Mode.actions, "1.2.3");
|
||||
});
|
||||
|
||||
const uploadToUploadsDomainFlags = createFeatureFlags([
|
||||
FeatureFlag.DatabaseUploadsEnabled,
|
||||
FeatureFlag.UploadsDomainEnabled,
|
||||
]);
|
||||
|
||||
const testRepoName: RepositoryNwo = { owner: "github", repo: "example" };
|
||||
const testApiDetails: GitHubApiDetails = {
|
||||
auth: "1234",
|
||||
@@ -48,66 +60,21 @@ function getTestConfig(tmpDir: string): Config {
|
||||
dbLocation: tmpDir,
|
||||
packs: {},
|
||||
debugMode: false,
|
||||
debugArtifactName: DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: DEFAULT_DEBUG_DATABASE_NAME,
|
||||
};
|
||||
}
|
||||
|
||||
interface LoggedMessage {
|
||||
type: "debug" | "info" | "warning" | "error";
|
||||
message: string | Error;
|
||||
}
|
||||
|
||||
function getRecordingLogger(messages: LoggedMessage[]): Logger {
|
||||
return {
|
||||
debug: (message: string) => {
|
||||
messages.push({ type: "debug", message });
|
||||
console.debug(message);
|
||||
},
|
||||
info: (message: string) => {
|
||||
messages.push({ type: "info", message });
|
||||
console.info(message);
|
||||
},
|
||||
warning: (message: string | Error) => {
|
||||
messages.push({ type: "warning", message });
|
||||
console.warn(message);
|
||||
},
|
||||
error: (message: string | Error) => {
|
||||
messages.push({ type: "error", message });
|
||||
console.error(message);
|
||||
},
|
||||
isDebug: () => true,
|
||||
startGroup: () => undefined,
|
||||
endGroup: () => undefined,
|
||||
};
|
||||
}
|
||||
|
||||
function mockHttpRequests(
|
||||
optInStatusCode: number,
|
||||
useUploadDomain?: boolean,
|
||||
databaseUploadStatusCode?: number
|
||||
async function mockHttpRequests(
|
||||
featureFlags: FeatureFlags,
|
||||
databaseUploadStatusCode: number
|
||||
) {
|
||||
// Passing an auth token is required, so we just use a dummy value
|
||||
const client = github.getOctokit("123");
|
||||
|
||||
const requestSpy = sinon.stub(client, "request");
|
||||
|
||||
const optInSpy = requestSpy.withArgs(
|
||||
"GET /repos/:owner/:repo/code-scanning/codeql/databases"
|
||||
);
|
||||
if (optInStatusCode < 300) {
|
||||
optInSpy.resolves({
|
||||
status: optInStatusCode,
|
||||
data: {
|
||||
useUploadDomain,
|
||||
},
|
||||
headers: {},
|
||||
url: "GET /repos/:owner/:repo/code-scanning/codeql/databases",
|
||||
});
|
||||
} else {
|
||||
optInSpy.throws(new HTTPError("some error message", optInStatusCode));
|
||||
}
|
||||
|
||||
if (databaseUploadStatusCode !== undefined) {
|
||||
const url = useUploadDomain
|
||||
const url = (await featureFlags.getValue(FeatureFlag.UploadsDomainEnabled))
|
||||
? "POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name"
|
||||
: "PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language";
|
||||
const databaseUploadSpy = requestSpy.withArgs(url);
|
||||
@@ -118,7 +85,6 @@ function mockHttpRequests(
|
||||
new HTTPError("some error message", databaseUploadStatusCode)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
sinon.stub(apiClient, "getApiClient").value(() => client);
|
||||
}
|
||||
@@ -136,6 +102,7 @@ test("Abort database upload if 'upload-database' input set to false", async (t)
|
||||
await uploadDatabases(
|
||||
testRepoName,
|
||||
getTestConfig(tmpDir),
|
||||
uploadToUploadsDomainFlags,
|
||||
testApiDetails,
|
||||
getRecordingLogger(loggedMessages)
|
||||
);
|
||||
@@ -165,6 +132,7 @@ test("Abort database upload if running against GHES", async (t) => {
|
||||
await uploadDatabases(
|
||||
testRepoName,
|
||||
config,
|
||||
createFeatureFlags([]),
|
||||
testApiDetails,
|
||||
getRecordingLogger(loggedMessages)
|
||||
);
|
||||
@@ -194,6 +162,7 @@ test("Abort database upload if running against GHAE", async (t) => {
|
||||
await uploadDatabases(
|
||||
testRepoName,
|
||||
config,
|
||||
createFeatureFlags([]),
|
||||
testApiDetails,
|
||||
getRecordingLogger(loggedMessages)
|
||||
);
|
||||
@@ -220,6 +189,7 @@ test("Abort database upload if not analyzing default branch", async (t) => {
|
||||
await uploadDatabases(
|
||||
testRepoName,
|
||||
getTestConfig(tmpDir),
|
||||
uploadToUploadsDomainFlags,
|
||||
testApiDetails,
|
||||
getRecordingLogger(loggedMessages)
|
||||
);
|
||||
@@ -233,7 +203,7 @@ test("Abort database upload if not analyzing default branch", async (t) => {
|
||||
});
|
||||
});
|
||||
|
||||
test("Abort database upload if opt-in request returns 404", async (t) => {
|
||||
test("Abort database upload if feature flag is disabled", async (t) => {
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
sinon
|
||||
@@ -242,8 +212,6 @@ test("Abort database upload if opt-in request returns 404", async (t) => {
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
|
||||
mockHttpRequests(404);
|
||||
|
||||
setCodeQL({
|
||||
async databaseBundle() {
|
||||
return;
|
||||
@@ -254,6 +222,7 @@ test("Abort database upload if opt-in request returns 404", async (t) => {
|
||||
await uploadDatabases(
|
||||
testRepoName,
|
||||
getTestConfig(tmpDir),
|
||||
createFeatureFlags([FeatureFlag.UploadsDomainEnabled]),
|
||||
testApiDetails,
|
||||
getRecordingLogger(loggedMessages)
|
||||
);
|
||||
@@ -268,41 +237,6 @@ test("Abort database upload if opt-in request returns 404", async (t) => {
|
||||
});
|
||||
});
|
||||
|
||||
test("Abort database upload if opt-in request fails with something other than 404", async (t) => {
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
sinon
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
|
||||
mockHttpRequests(500);
|
||||
|
||||
setCodeQL({
|
||||
async databaseBundle() {
|
||||
return;
|
||||
},
|
||||
});
|
||||
|
||||
const loggedMessages = [] as LoggedMessage[];
|
||||
await uploadDatabases(
|
||||
testRepoName,
|
||||
getTestConfig(tmpDir),
|
||||
testApiDetails,
|
||||
getRecordingLogger(loggedMessages)
|
||||
);
|
||||
t.assert(
|
||||
loggedMessages.find(
|
||||
(v) =>
|
||||
v.type === "info" &&
|
||||
v.message ===
|
||||
"Skipping database upload due to unknown error: Error: some error message"
|
||||
) !== undefined
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test("Don't crash if uploading a database fails", async (t) => {
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
@@ -312,7 +246,11 @@ test("Don't crash if uploading a database fails", async (t) => {
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
|
||||
mockHttpRequests(200, false, 500);
|
||||
const featureFlags = createFeatureFlags([
|
||||
FeatureFlag.DatabaseUploadsEnabled,
|
||||
]);
|
||||
|
||||
await mockHttpRequests(featureFlags, 500);
|
||||
|
||||
setCodeQL({
|
||||
async databaseBundle(_: string, outputFilePath: string) {
|
||||
@@ -324,9 +262,11 @@ test("Don't crash if uploading a database fails", async (t) => {
|
||||
await uploadDatabases(
|
||||
testRepoName,
|
||||
getTestConfig(tmpDir),
|
||||
featureFlags,
|
||||
testApiDetails,
|
||||
getRecordingLogger(loggedMessages)
|
||||
);
|
||||
|
||||
t.assert(
|
||||
loggedMessages.find(
|
||||
(v) =>
|
||||
@@ -347,7 +287,7 @@ test("Successfully uploading a database to api.github.com", async (t) => {
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
|
||||
mockHttpRequests(200, false, 201);
|
||||
await mockHttpRequests(uploadToUploadsDomainFlags, 201);
|
||||
|
||||
setCodeQL({
|
||||
async databaseBundle(_: string, outputFilePath: string) {
|
||||
@@ -359,6 +299,7 @@ test("Successfully uploading a database to api.github.com", async (t) => {
|
||||
await uploadDatabases(
|
||||
testRepoName,
|
||||
getTestConfig(tmpDir),
|
||||
uploadToUploadsDomainFlags,
|
||||
testApiDetails,
|
||||
getRecordingLogger(loggedMessages)
|
||||
);
|
||||
@@ -381,7 +322,7 @@ test("Successfully uploading a database to uploads.github.com", async (t) => {
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
|
||||
mockHttpRequests(200, true, 201);
|
||||
await mockHttpRequests(uploadToUploadsDomainFlags, 201);
|
||||
|
||||
setCodeQL({
|
||||
async databaseBundle(_: string, outputFilePath: string) {
|
||||
@@ -393,6 +334,7 @@ test("Successfully uploading a database to uploads.github.com", async (t) => {
|
||||
await uploadDatabases(
|
||||
testRepoName,
|
||||
getTestConfig(tmpDir),
|
||||
uploadToUploadsDomainFlags,
|
||||
testApiDetails,
|
||||
getRecordingLogger(loggedMessages)
|
||||
);
|
||||
|
||||
@@ -4,6 +4,7 @@ import * as actionsUtil from "./actions-util";
|
||||
import { getApiClient, GitHubApiDetails } from "./api-client";
|
||||
import { getCodeQL } from "./codeql";
|
||||
import { Config } from "./config-utils";
|
||||
import { FeatureFlag, FeatureFlags } from "./feature-flags";
|
||||
import { Logger } from "./logging";
|
||||
import { RepositoryNwo } from "./repository";
|
||||
import * as util from "./util";
|
||||
@@ -12,6 +13,7 @@ import { bundleDb } from "./util";
|
||||
export async function uploadDatabases(
|
||||
repositoryNwo: RepositoryNwo,
|
||||
config: Config,
|
||||
featureFlags: FeatureFlags,
|
||||
apiDetails: GitHubApiDetails,
|
||||
logger: Logger
|
||||
): Promise<void> {
|
||||
@@ -32,36 +34,27 @@ export async function uploadDatabases(
|
||||
return;
|
||||
}
|
||||
|
||||
const client = getApiClient(apiDetails);
|
||||
let useUploadDomain: boolean;
|
||||
try {
|
||||
const response = await client.request(
|
||||
"GET /repos/:owner/:repo/code-scanning/codeql/databases",
|
||||
{
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
}
|
||||
);
|
||||
useUploadDomain = response.data["uploads_domain_enabled"];
|
||||
} catch (e) {
|
||||
if (util.isHTTPError(e) && e.status === 404) {
|
||||
if (!(await featureFlags.getValue(FeatureFlag.DatabaseUploadsEnabled))) {
|
||||
logger.debug(
|
||||
"Repository is not opted in to database uploads. Skipping upload."
|
||||
);
|
||||
} else {
|
||||
console.log(e);
|
||||
logger.info(`Skipping database upload due to unknown error: ${e}`);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
const client = getApiClient(apiDetails);
|
||||
const codeql = await getCodeQL(config.codeQLCmd);
|
||||
const useUploadDomain = await featureFlags.getValue(
|
||||
FeatureFlag.UploadsDomainEnabled
|
||||
);
|
||||
|
||||
for (const language of config.languages) {
|
||||
// Upload the database bundle.
|
||||
// Although we are uploading arbitrary file contents to the API, it's worth
|
||||
// noting that it's the API's job to validate that the contents is acceptable.
|
||||
// This API method is available to anyone with write access to the repo.
|
||||
const payload = fs.readFileSync(await bundleDb(config, language, codeql));
|
||||
const payload = fs.readFileSync(
|
||||
await bundleDb(config, language, codeql, language)
|
||||
);
|
||||
try {
|
||||
if (useUploadDomain) {
|
||||
await client.request(
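Condensing the change above: the bespoke opt-in request is gone, and the upload is now gated purely on feature flags. A rough sketch of the resulting control flow (illustrative, not a verbatim copy of `uploadDatabases`; the endpoint strings are the ones shown in the diff):

```typescript
import { FeatureFlag, FeatureFlags } from "./feature-flags";
import { Logger } from "./logging";

// Returns the route a bundle would be sent to, or undefined if uploads are skipped.
async function sketchUploadGating(
  featureFlags: FeatureFlags,
  logger: Logger
): Promise<string | undefined> {
  if (!(await featureFlags.getValue(FeatureFlag.DatabaseUploadsEnabled))) {
    logger.debug(
      "Repository is not opted in to database uploads. Skipping upload."
    );
    return undefined;
  }
  // The uploads-domain flag decides which endpoint receives each bundle.
  return (await featureFlags.getValue(FeatureFlag.UploadsDomainEnabled))
    ? "POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name"
    : "PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language";
}
```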
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
{
|
||||
"bundleVersion": "codeql-bundle-20211208"
|
||||
"bundleVersion": "codeql-bundle-20220112"
|
||||
}
|
||||
|
||||
165 src/feature-flags.test.ts (new file)
@@ -0,0 +1,165 @@
|
||||
import test from "ava";
|
||||
|
||||
import { GitHubApiDetails } from "./api-client";
|
||||
import { FeatureFlag, GitHubFeatureFlags } from "./feature-flags";
|
||||
import { getRunnerLogger } from "./logging";
|
||||
import { parseRepositoryNwo } from "./repository";
|
||||
import {
|
||||
getRecordingLogger,
|
||||
LoggedMessage,
|
||||
mockFeatureFlagApiEndpoint,
|
||||
setupActionsVars,
|
||||
setupTests,
|
||||
} from "./testing-utils";
|
||||
import * as util from "./util";
|
||||
import { GitHubVariant, initializeEnvironment, Mode, withTmpDir } from "./util";
|
||||
|
||||
setupTests(test);
|
||||
|
||||
test.beforeEach(() => {
|
||||
initializeEnvironment(Mode.actions, "1.2.3");
|
||||
});
|
||||
|
||||
const testApiDetails: GitHubApiDetails = {
|
||||
auth: "1234",
|
||||
url: "https://github.com",
|
||||
};
|
||||
|
||||
const testRepositoryNwo = parseRepositoryNwo("github/example");
|
||||
|
||||
const ALL_FEATURE_FLAGS_DISABLED_VARIANTS: Array<{
|
||||
description: string;
|
||||
gitHubVersion: util.GitHubVersion;
|
||||
}> = [
|
||||
{
|
||||
description: "GHES",
|
||||
gitHubVersion: { type: GitHubVariant.GHES, version: "3.0.0" },
|
||||
},
|
||||
{ description: "GHAE", gitHubVersion: { type: GitHubVariant.GHAE } },
|
||||
];
|
||||
|
||||
for (const variant of ALL_FEATURE_FLAGS_DISABLED_VARIANTS) {
|
||||
test(`All feature flags are disabled if running against ${variant.description}`, async (t) => {
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
const loggedMessages = [];
|
||||
const featureFlags = new GitHubFeatureFlags(
|
||||
variant.gitHubVersion,
|
||||
testApiDetails,
|
||||
testRepositoryNwo,
|
||||
getRecordingLogger(loggedMessages)
|
||||
);
|
||||
|
||||
for (const flag of Object.values(FeatureFlag)) {
|
||||
t.assert((await featureFlags.getValue(flag)) === false);
|
||||
}
|
||||
|
||||
t.assert(
|
||||
loggedMessages.find(
|
||||
(v: LoggedMessage) =>
|
||||
v.type === "debug" &&
|
||||
v.message ===
|
||||
"Not running against github.com. Disabling all feature flags."
|
||||
) !== undefined
|
||||
);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
test("Feature flags are disabled if they're not returned in API response", async (t) => {
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
const loggedMessages = [];
|
||||
const featureFlags = new GitHubFeatureFlags(
|
||||
{ type: GitHubVariant.DOTCOM },
|
||||
testApiDetails,
|
||||
testRepositoryNwo,
|
||||
getRecordingLogger(loggedMessages)
|
||||
);
|
||||
|
||||
mockFeatureFlagApiEndpoint(200, {});
|
||||
|
||||
for (const flag of Object.values(FeatureFlag)) {
|
||||
t.assert((await featureFlags.getValue(flag)) === false);
|
||||
}
|
||||
|
||||
for (const featureFlag of [
|
||||
"database_uploads_enabled",
|
||||
"ml_powered_queries_enabled",
|
||||
"uploads_domain_enabled",
|
||||
]) {
|
||||
t.assert(
|
||||
loggedMessages.find(
|
||||
(v: LoggedMessage) =>
|
||||
v.type === "debug" &&
|
||||
v.message ===
|
||||
`Feature flag '${featureFlag}' undefined in API response, considering it disabled.`
|
||||
) !== undefined
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
test("Feature flags exception is propagated if the API request errors", async (t) => {
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
const featureFlags = new GitHubFeatureFlags(
|
||||
{ type: GitHubVariant.DOTCOM },
|
||||
testApiDetails,
|
||||
testRepositoryNwo,
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
|
||||
mockFeatureFlagApiEndpoint(500, {});
|
||||
|
||||
await t.throwsAsync(async () => featureFlags.preloadFeatureFlags(), {
|
||||
message:
|
||||
"Encountered an error while trying to load feature flags: Error: some error message",
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
const FEATURE_FLAGS = [
|
||||
"database_uploads_enabled",
|
||||
"ml_powered_queries_enabled",
|
||||
"uploads_domain_enabled",
|
||||
];
|
||||
|
||||
for (const featureFlag of FEATURE_FLAGS) {
|
||||
test(`Feature flag '${featureFlag}' is enabled if enabled in the API response`, async (t) => {
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
const featureFlags = new GitHubFeatureFlags(
|
||||
{ type: GitHubVariant.DOTCOM },
|
||||
testApiDetails,
|
||||
testRepositoryNwo,
|
||||
getRunnerLogger(true)
|
||||
);
|
||||
|
||||
const expectedFeatureFlags = {};
|
||||
for (const f of FEATURE_FLAGS) {
|
||||
expectedFeatureFlags[f] = false;
|
||||
}
|
||||
expectedFeatureFlags[featureFlag] = true;
|
||||
mockFeatureFlagApiEndpoint(200, expectedFeatureFlags);
|
||||
|
||||
const actualFeatureFlags = {
|
||||
database_uploads_enabled: await featureFlags.getValue(
|
||||
FeatureFlag.DatabaseUploadsEnabled
|
||||
),
|
||||
ml_powered_queries_enabled: await featureFlags.getValue(
|
||||
FeatureFlag.MlPoweredQueriesEnabled
|
||||
),
|
||||
uploads_domain_enabled: await featureFlags.getValue(
|
||||
FeatureFlag.UploadsDomainEnabled
|
||||
),
|
||||
};
|
||||
|
||||
t.deepEqual(actualFeatureFlags, expectedFeatureFlags);
|
||||
});
|
||||
});
|
||||
}
|
||||
96 src/feature-flags.ts (new file)
@@ -0,0 +1,96 @@
import { getApiClient, GitHubApiDetails } from "./api-client";
import { Logger } from "./logging";
import { RepositoryNwo } from "./repository";
import * as util from "./util";

export interface FeatureFlags {
  getValue(flag: FeatureFlag): Promise<boolean>;
}

export enum FeatureFlag {
  DatabaseUploadsEnabled = "database_uploads_enabled",
  MlPoweredQueriesEnabled = "ml_powered_queries_enabled",
  UploadsDomainEnabled = "uploads_domain_enabled",
}

/**
 * A response from the GitHub API that contains feature flag enablement information for the CodeQL
 * Action.
 *
 * It maps feature flags to whether they are enabled or not.
 */
type FeatureFlagsApiResponse = Partial<Record<FeatureFlag, boolean>>;

export class GitHubFeatureFlags implements FeatureFlags {
  private cachedApiResponse: FeatureFlagsApiResponse | undefined;

  constructor(
    private gitHubVersion: util.GitHubVersion,
    private apiDetails: GitHubApiDetails,
    private repositoryNwo: RepositoryNwo,
    private logger: Logger
  ) {}

  async getValue(flag: FeatureFlag): Promise<boolean> {
    const response = (await this.getApiResponse())[flag];
    if (response === undefined) {
      this.logger.debug(
        `Feature flag '${flag}' undefined in API response, considering it disabled.`
      );
      return false;
    }
    return response;
  }

  async preloadFeatureFlags(): Promise<void> {
    await this.getApiResponse();
  }

  private async getApiResponse(): Promise<FeatureFlagsApiResponse> {
    const loadApiResponse = async () => {
      // Do nothing when not running against github.com
      if (this.gitHubVersion.type !== util.GitHubVariant.DOTCOM) {
        this.logger.debug(
          "Not running against github.com. Disabling all feature flags."
        );
        return {};
      }
      const client = getApiClient(this.apiDetails);
      try {
        const response = await client.request(
          "GET /repos/:owner/:repo/code-scanning/codeql-action/features",
          {
            owner: this.repositoryNwo.owner,
            repo: this.repositoryNwo.repo,
          }
        );
        return response.data;
      } catch (e) {
        // Some feature flags, such as `ml_powered_queries_enabled` affect the produced alerts.
        // Considering these feature flags disabled in the event of a transient error could
        // therefore lead to alert churn. As a result, we crash if we cannot determine the value of
        // the feature flags.
        throw new Error(
          `Encountered an error while trying to load feature flags: ${e}`
        );
      }
    };

    const apiResponse = this.cachedApiResponse || (await loadApiResponse());
    this.cachedApiResponse = apiResponse;
    return apiResponse;
  }
}

/**
 * Create a feature flags instance with the specified set of enabled flags.
 *
 * This should be only used within tests.
 */
export function createFeatureFlags(enabledFlags: FeatureFlag[]): FeatureFlags {
  return {
    getValue: async (flag) => {
      return enabledFlags.includes(flag);
    },
  };
}
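A short usage sketch of this new module (illustrative only; elsewhere in this diff the actions construct `GitHubFeatureFlags` directly, while the tests use `createFeatureFlags`):

```typescript
import { createFeatureFlags, FeatureFlag, FeatureFlags } from "./feature-flags";

// Hypothetical consumer: gate optional behaviour on a single flag.
async function mlPoweredQueriesEnabled(flags: FeatureFlags): Promise<boolean> {
  return await flags.getValue(FeatureFlag.MlPoweredQueriesEnabled);
}

// In tests, the in-memory implementation stands in for the API-backed
// GitHubFeatureFlags (which needs API details and a repository):
const flags = createFeatureFlags([FeatureFlag.MlPoweredQueriesEnabled]);
void mlPoweredQueriesEnabled(flags).then((enabled) => console.log(enabled)); // true
```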
@@ -189,16 +189,18 @@ test("resolveUriToFile", (t) => {
|
||||
|
||||
test("addFingerprints", async (t) => {
|
||||
// Run an end-to-end test on a test file
|
||||
let input = fs
|
||||
const input = JSON.parse(
|
||||
fs
|
||||
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`)
|
||||
.toString();
|
||||
let expected = fs
|
||||
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.expected.sarif`)
|
||||
.toString();
|
||||
|
||||
// The test files are stored prettified, but addFingerprints outputs condensed JSON
|
||||
input = JSON.stringify(JSON.parse(input));
|
||||
expected = JSON.stringify(JSON.parse(expected));
|
||||
.toString()
|
||||
);
|
||||
const expected = JSON.parse(
|
||||
fs
|
||||
.readFileSync(
|
||||
`${__dirname}/../src/testdata/fingerprinting.expected.sarif`
|
||||
)
|
||||
.toString()
|
||||
);
|
||||
|
||||
// The URIs in the SARIF files resolve to files in the testdata directory
|
||||
const sourceRoot = path.normalize(`${__dirname}/../src/testdata`);
|
||||
@@ -215,16 +217,18 @@ test("addFingerprints", async (t) => {
|
||||
|
||||
test("missingRegions", async (t) => {
|
||||
// Run an end-to-end test on a test file
|
||||
let input = fs
|
||||
const input = JSON.parse(
|
||||
fs
|
||||
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`)
|
||||
.toString();
|
||||
let expected = fs
|
||||
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.expected.sarif`)
|
||||
.toString();
|
||||
|
||||
// The test files are stored prettified, but addFingerprints outputs condensed JSON
|
||||
input = JSON.stringify(JSON.parse(input));
|
||||
expected = JSON.stringify(JSON.parse(expected));
|
||||
.toString()
|
||||
);
|
||||
const expected = JSON.parse(
|
||||
fs
|
||||
.readFileSync(
|
||||
`${__dirname}/../src/testdata/fingerprinting2.expected.sarif`
|
||||
)
|
||||
.toString()
|
||||
);
|
||||
|
||||
// The URIs in the SARIF files resolve to files in the testdata directory
|
||||
const sourceRoot = path.normalize(`${__dirname}/../src/testdata`);
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.