mirror of
https://github.com/github/codeql-action.git
synced 2025-12-06 15:58:06 +08:00
Compare commits
101 Commits
error-impr
...
daverlo/ge
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7a50226279 | ||
|
|
382c51457f | ||
|
|
e9e2284547 | ||
|
|
9597f2e889 | ||
|
|
fd94b16cf5 | ||
|
|
288f49eee5 | ||
|
|
c2b7b7f977 | ||
|
|
008b0062c6 | ||
|
|
c5e07ebfcc | ||
|
|
96ded2b500 | ||
|
|
5eb3736850 | ||
|
|
e9efcf1900 | ||
|
|
73c73baaa2 | ||
|
|
aaeb9751bb | ||
|
|
2527130a32 | ||
|
|
2828c43ac7 | ||
|
|
4b56177c28 | ||
|
|
150e07f914 | ||
|
|
5b0aafadb1 | ||
|
|
9c5f7640e5 | ||
|
|
cf08f5a9cd | ||
|
|
34b372292b | ||
|
|
6d7a135fea | ||
|
|
bcf676e52d | ||
|
|
7c2a7b236c | ||
|
|
0fdcc52338 | ||
|
|
d5693a7fd2 | ||
|
|
eb4eda5cbe | ||
|
|
97eafbc804 | ||
|
|
6e18b27d4d | ||
|
|
de0b59097a | ||
|
|
d5c453c995 | ||
|
|
657540584e | ||
|
|
a0660c80bd | ||
|
|
396f7167d8 | ||
|
|
e5ad069f2c | ||
|
|
5b35de62bd | ||
|
|
d5853409b4 | ||
|
|
8608105240 | ||
|
|
ac66bbe1fe | ||
|
|
3a28cb4ca8 | ||
|
|
8127c47bbd | ||
|
|
44c88fdd05 | ||
|
|
6230b36dc2 | ||
|
|
3d552ba624 | ||
|
|
42235cc048 | ||
|
|
631929a68f | ||
|
|
128c2cf718 | ||
|
|
69bf3f24d4 | ||
|
|
02d3d62def | ||
|
|
f0894d52f3 | ||
|
|
1fb3aaff6e | ||
|
|
dc366899d2 | ||
|
|
4896ba51da | ||
|
|
30d2cce9f8 | ||
|
|
1ef33b0330 | ||
|
|
368c14c502 | ||
|
|
e8896a906a | ||
|
|
f5ccce0c86 | ||
|
|
74268130c6 | ||
|
|
ae2d7afe3b | ||
|
|
12a37237d2 | ||
|
|
b38a014f94 | ||
|
|
5587e128ff | ||
|
|
74b4d8a6db | ||
|
|
175d681835 | ||
|
|
f4e72f4a09 | ||
|
|
5f057318b6 | ||
|
|
0f88c0111f | ||
|
|
bfaa0cf943 | ||
|
|
337dbe5618 | ||
|
|
6f42543a85 | ||
|
|
93dd64d351 | ||
|
|
87758a1402 | ||
|
|
813cb0479f | ||
|
|
9769e4a6df | ||
|
|
315a9f4b3c | ||
|
|
0446cb0aff | ||
|
|
29cf06569d | ||
|
|
ee63f4ee4b | ||
|
|
5b4f4e40af | ||
|
|
58a0034549 | ||
|
|
c7c1aa8045 | ||
|
|
b673c57b89 | ||
|
|
d138b00811 | ||
|
|
b86c3701ed | ||
|
|
7bb6ac6c60 | ||
|
|
e775d4e893 | ||
|
|
d1d80761ef | ||
|
|
7a78ec0a54 | ||
|
|
da3d6d25eb | ||
|
|
60fdcc2376 | ||
|
|
010117c1b7 | ||
|
|
7970d85db4 | ||
|
|
7496ac4fb3 | ||
|
|
c13712badb | ||
|
|
a30f8542ee | ||
|
|
aca790b504 | ||
|
|
80a5f3c700 | ||
|
|
999c772fa3 | ||
|
|
74eb3b3541 |
18
.github/workflows/cli.yml
vendored
Normal file
18
.github/workflows/cli.yml
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
name: "CodeScanning CLI"
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
# Build the CLI
|
||||
- name: Build CLI
|
||||
run: npm run build-cli
|
||||
|
||||
# Upload an empty SARIF file
|
||||
- name: Upload with CLI
|
||||
run: node cli/code-scanning-cli.js upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_API_URL --github-auth ${{ github.token }}
|
||||
2
.github/workflows/integration-testing.yml
vendored
2
.github/workflows/integration-testing.yml
vendored
@@ -22,7 +22,7 @@ jobs:
|
||||
env:
|
||||
TEST_MODE: true
|
||||
- run: |
|
||||
cd "$CODEQL_ACTION_DATABASE_DIR"
|
||||
cd "$RUNNER_TEMP/codeql_databases"
|
||||
# List all directories as there will be precisely one directory per database
|
||||
# but there may be other files in this directory such as query suites.
|
||||
if [ "$(ls -d */ | wc -l)" != 6 ] || \
|
||||
|
||||
2
.gitignore
vendored
Normal file
2
.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
/cli/
|
||||
|
||||
25
.vscode/launch.json
vendored
Normal file
25
.vscode/launch.json
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"name": "Debug AVA test file",
|
||||
"runtimeExecutable": "${workspaceFolder}/node_modules/.bin/ava",
|
||||
"runtimeArgs": [
|
||||
"${file}",
|
||||
"--break",
|
||||
"--serial",
|
||||
"--timeout=20m"
|
||||
],
|
||||
"port": 9229,
|
||||
"outputCapture": "std",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**/*.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
## Contributing
|
||||
# Contributing
|
||||
|
||||
[fork]: https://github.com/github/codeql-action/fork
|
||||
[pr]: https://github.com/github/codeql-action/compare
|
||||
@@ -20,10 +20,31 @@ Before you start, ensure that you have a recent version of node installed. You c
|
||||
* Run tests: `npm run test`. You’ll need to ensure that the JavaScript files are up-to-date first by running the command above.
|
||||
* Run the linter: `npm run lint`.
|
||||
|
||||
This project also includes configuration to run tests from VSCode (with support for breakpoints) - open the test file you wish to run and choose "Debug AVA test file" from the Run menu in the Run panel.
|
||||
|
||||
### Running the action
|
||||
|
||||
To see the effect of your changes and to test them, push your changes in a branch and then look at the [Actions output](https://github.com/github/codeql-action/actions) for that branch. You can also exercise the code locally by running the automated tests.
|
||||
|
||||
### Running the action locally
|
||||
|
||||
It is possible to run this action locally via [act](https://github.com/nektos/act) via the following steps:
|
||||
|
||||
1. Create a GitHub [Personal Access Token](https://github.com/settings/tokens) (PAT).
|
||||
1. Install [act](https://github.com/nektos/act) v0.2.10 or greater.
|
||||
1. Add a `.env` file in the root of the project you are running:
|
||||
|
||||
```bash
|
||||
CODEQL_LOCAL_RUN=true
|
||||
|
||||
# Optional, for better logging
|
||||
GITHUB_JOB=<ANY_JOB_NAME>
|
||||
```
|
||||
|
||||
1. Run `act -j codeql -s GITHUB_TOKEN=<PAT>`
|
||||
|
||||
Running locally will generate the CodeQL database and run all the queries, but it will avoid uploading and reporting results to GitHub. Note that this must be done on a repository that _consumes_ this action, not this repository. The use case is to debug failures of this action on specific repositories.
|
||||
|
||||
### Integration tests
|
||||
|
||||
As well as the unit tests (see _Common tasks_ above), there are integration tests, defined in `.github/workflows/integration-testing.yml`. These are run by a CI check. Depending on the change you’re making, you may want to add a test to this file or extend an existing one.
|
||||
@@ -35,6 +56,7 @@ As well as the unit tests (see _Common tasks_ above), there are integration test
|
||||
3. Make your change, add tests, and make sure the tests still pass
|
||||
4. Push to your fork and [submit a pull request][pr]
|
||||
5. Pat yourself on the back and wait for your pull request to be reviewed and merged.
|
||||
|
||||
If you're a GitHub staff member, you can merge your own PR once it's approved; for external contributors, GitHub staff will merge your PR once it's approved.
|
||||
|
||||
Here are a few things you can do that will increase the likelihood of your pull request being accepted:
|
||||
|
||||
@@ -19,7 +19,10 @@ inputs:
|
||||
threads:
|
||||
description: The number of threads to be used by CodeQL.
|
||||
required: false
|
||||
default: "1"
|
||||
checkout_path:
|
||||
description: "The path at which the analyzed repository was checked out. Used to relativeize any absolute paths in the uploaded SARIF file."
|
||||
required: false
|
||||
default: ${{ github.workspace }}
|
||||
token:
|
||||
default: ${{ github.token }}
|
||||
matrix:
|
||||
|
||||
@@ -5,7 +5,7 @@ inputs:
|
||||
tools:
|
||||
description: URL of CodeQL tools
|
||||
required: false
|
||||
default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200630/codeql-bundle.tar.gz
|
||||
# If not specified the Action will check in several places until it finds the CodeQL tools.
|
||||
languages:
|
||||
description: The languages to be analysed
|
||||
required: false
|
||||
|
||||
4
lib/analysis-paths.js
generated
4
lib/analysis-paths.js
generated
@@ -23,7 +23,7 @@ function buildIncludeExcludeEnvVar(paths) {
|
||||
}
|
||||
return paths.join('\n');
|
||||
}
|
||||
function includeAndExcludeAnalysisPaths(config, languages) {
|
||||
function includeAndExcludeAnalysisPaths(config) {
|
||||
// The 'LGTM_INDEX_INCLUDE' and 'LGTM_INDEX_EXCLUDE' environment variables
|
||||
// control which files/directories are traversed when scanning.
|
||||
// This allows including files that otherwise would not be scanned, or
|
||||
@@ -51,7 +51,7 @@ function includeAndExcludeAnalysisPaths(config, languages) {
|
||||
if ((config.paths.length !== 0 ||
|
||||
config.pathsIgnore.length !== 0 ||
|
||||
filters.length !== 0) &&
|
||||
!languages.every(isInterpretedLanguage)) {
|
||||
!config.languages.every(isInterpretedLanguage)) {
|
||||
core.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEjD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACrE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,8BAA8B,CAAC,MAA0B,EAAE,SAAmB;IAC5F,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;KACpF;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC;KAC1F;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IACvD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IAC7D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC/D;IAED,oEAAoE;IACpE,sEAAsE;IACtE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;QACxB,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC;QAC/B,OAAO,CAAC,MAAM,KAAK,CAAC,CAAC;QACvB,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAC3C,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC5G;AACH,CAAC;AAjCD,wEAiCC"}
|
||||
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEjD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACrE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;KACpF;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC;KAC1F;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IACvD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IAC7D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC/D;IAED,oEAAoE;IACpE,sEAAsE;IACtE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;QACxB,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC;QAC/B,OAAO,CAAC,MAAM,KAAK,CAAC,CAAC;QACvB,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAClD,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC5G;AACH,CAAC;AAjCD,wEAiCC"}
|
||||
23
lib/analysis-paths.test.js
generated
23
lib/analysis-paths.test.js
generated
@@ -12,21 +12,30 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const analysisPaths = __importStar(require("./analysis-paths"));
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
testing_utils_1.setupTests(ava_1.default);
|
||||
ava_1.default("emptyPaths", async (t) => {
|
||||
let config = new configUtils.Config();
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config, []);
|
||||
const config = {
|
||||
languages: [],
|
||||
queries: {},
|
||||
pathsIgnore: [],
|
||||
paths: [],
|
||||
originalUserInput: {},
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
|
||||
t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
|
||||
t.is(process.env['LGTM_INDEX_FILTERS'], undefined);
|
||||
});
|
||||
ava_1.default("nonEmptyPaths", async (t) => {
|
||||
let config = new configUtils.Config();
|
||||
config.paths.push('path1', 'path2', '**/path3');
|
||||
config.pathsIgnore.push('path4', 'path5', 'path6/**');
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config, []);
|
||||
const config = {
|
||||
languages: [],
|
||||
queries: {},
|
||||
paths: ['path1', 'path2', '**/path3'],
|
||||
pathsIgnore: ['path4', 'path5', 'path6/**'],
|
||||
originalUserInput: {},
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
|
||||
t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path4\npath5');
|
||||
t.is(process.env['LGTM_INDEX_FILTERS'], 'include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**');
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,4DAA8C;AAC9C,mDAA2C;AAE3C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC3B,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;IAChD,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;IACtD,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,gGAAgG,CAAC,CAAC;AAC5I,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA2C;AAE3C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC3B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,WAAW,EAAE,EAAE;QACf,KAAK,EAAE,EAAE;QACT,iBAAiB,EAAE,EAAE;KACtB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;QACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;QAC3C,iBAAiB,EAAE,EAAE;KACtB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,gGAAgG,CAAC,CAAC;AAC5I,CAAC,CAAC,CAAC"}
|
||||
28
lib/api-client.js
generated
28
lib/api-client.js
generated
@@ -13,10 +13,34 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const github = __importStar(require("@actions/github"));
|
||||
const console_log_level_1 = __importDefault(require("console-log-level"));
|
||||
exports.getApiClient = function () {
|
||||
return new github.GitHub(core.getInput('token'), {
|
||||
const util_1 = require("./util");
|
||||
exports.getApiClient = function (githubAuth, githubApiUrl, allowLocalRun = false) {
|
||||
if (util_1.isLocalRun() && !allowLocalRun) {
|
||||
throw new Error('Invalid API call in local run');
|
||||
}
|
||||
return new github.GitHub({
|
||||
auth: parseAuth(githubAuth),
|
||||
baseUrl: githubApiUrl,
|
||||
userAgent: "CodeQL Action",
|
||||
log: console_log_level_1.default({ level: "debug" })
|
||||
});
|
||||
};
|
||||
// Parses the user input as either a single token,
|
||||
// or a username and password / PAT.
|
||||
function parseAuth(auth) {
|
||||
// Check if it's a username:password pair
|
||||
const c = auth.indexOf(':');
|
||||
if (c !== -1) {
|
||||
return 'basic ' + Buffer.from(auth).toString('base64');
|
||||
}
|
||||
// Otherwise use the token as it is
|
||||
return auth;
|
||||
}
|
||||
// Temporary function to aid in the transition to running on and off of github actions.
|
||||
// Once all code has been coverted this function should be removed or made canonical
|
||||
// and called only from the action entrypoints.
|
||||
function getActionsApiClient(allowLocalRun = false) {
|
||||
return exports.getApiClient(core.getInput('token'), util_1.getRequiredEnvParam('GITHUB_API_URL'), allowLocalRun);
|
||||
}
|
||||
exports.getActionsApiClient = getActionsApiClient;
|
||||
//# sourceMappingURL=api-client.js.map
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,oDAAsC;AACtC,wDAA0C;AAC1C,0EAAgD;AAEnC,QAAA,YAAY,GAAG;IAC1B,OAAO,IAAI,MAAM,CAAC,MAAM,CACtB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB;QACE,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CAAC;AACP,CAAC,CAAC"}
|
||||
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,oDAAsC;AACtC,wDAA0C;AAC1C,0EAAgD;AAEhD,iCAAyD;AAE5C,QAAA,YAAY,GAAG,UAAS,UAAkB,EAAE,YAAoB,EAAE,aAAa,GAAG,KAAK;IAClG,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IACD,OAAO,IAAI,MAAM,CAAC,MAAM,CACtB;QACE,IAAI,EAAE,SAAS,CAAC,UAAU,CAAC;QAC3B,OAAO,EAAE,YAAY;QACrB,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CAAC;AACP,CAAC,CAAC;AAEF,kDAAkD;AAClD,oCAAoC;AACpC,SAAS,SAAS,CAAC,IAAY;IAC7B,yCAAyC;IACzC,MAAM,CAAC,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5B,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE;QACZ,OAAO,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;KACxD;IAED,mCAAmC;IACnC,OAAO,IAAI,CAAC;AACd,CAAC;AAED,uFAAuF;AACvF,oFAAoF;AACpF,+CAA+C;AAC/C,SAAgB,mBAAmB,CAAC,aAAa,GAAG,KAAK;IACvD,OAAO,oBAAY,CACjB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,0BAAmB,CAAC,gBAAgB,CAAC,EACrC,aAAa,CAAC,CAAC;AACnB,CAAC;AALD,kDAKC"}
|
||||
29
lib/autobuild.js
generated
29
lib/autobuild.js
generated
@@ -9,20 +9,34 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const codeql_1 = require("./codeql");
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
const config_utils = __importStar(require("./config-utils"));
|
||||
const util = __importStar(require("./util"));
|
||||
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
|
||||
var _a, _b;
|
||||
const status = failingLanguage !== undefined || cause !== undefined ? 'failure' : 'success';
|
||||
const statusReportBase = await util.createStatusReportBase('autobuild', status, startedAt, (_a = cause) === null || _a === void 0 ? void 0 : _a.message, (_b = cause) === null || _b === void 0 ? void 0 : _b.stack);
|
||||
const statusReport = {
|
||||
...statusReportBase,
|
||||
autobuild_languages: allLanguages.join(','),
|
||||
autobuild_failure: failingLanguage,
|
||||
};
|
||||
await util.sendStatusReport(statusReport);
|
||||
}
|
||||
async function run() {
|
||||
var _a;
|
||||
const startedAt = new Date();
|
||||
let language;
|
||||
try {
|
||||
if (util.should_abort('autobuild', true) || !await util.reportActionStarting('autobuild')) {
|
||||
util.prepareLocalRunEnvironment();
|
||||
if (!await util.sendStatusReport(await util.createStatusReportBase('autobuild', 'starting', startedAt), true)) {
|
||||
return;
|
||||
}
|
||||
const config = await config_utils.getConfig();
|
||||
// Attempt to find a language to autobuild
|
||||
// We want pick the dominant language in the repo from the ones we're able to build
|
||||
// The languages are sorted in order specified by user or by lines of code if we got
|
||||
// them from the GitHub API, so try to build the first language on the list.
|
||||
const autobuildLanguages = ((_a = process.env[sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES]) === null || _a === void 0 ? void 0 : _a.split(',')) || [];
|
||||
const language = autobuildLanguages[0];
|
||||
const autobuildLanguages = config.languages.filter(codeql_1.isTracedLanguage);
|
||||
language = autobuildLanguages[0];
|
||||
if (!language) {
|
||||
core.info("None of the languages in this project require extra build steps");
|
||||
return;
|
||||
@@ -38,10 +52,11 @@ async function run() {
|
||||
}
|
||||
catch (error) {
|
||||
core.setFailed("We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. " + error.message);
|
||||
await util.reportActionFailed('autobuild', error.message, error.stack);
|
||||
console.log(error);
|
||||
await sendCompletedStatusReport(startedAt, [language], language, error);
|
||||
return;
|
||||
}
|
||||
await util.reportActionSucceeded('autobuild');
|
||||
await sendCompletedStatusReport(startedAt, [language]);
|
||||
}
|
||||
run().catch(e => {
|
||||
core.setFailed("autobuild action failed. " + e);
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,qCAAqC;AACrC,gEAAkD;AAClD,6CAA+B;AAE/B,KAAK,UAAU,GAAG;;IAChB,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,EAAE;YACzF,OAAO;SACR;QAED,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,OAAA,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,8BAA8B,CAAC,0CAAE,KAAK,CAAC,GAAG,MAAK,EAAE,CAAC;QACnG,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;QAC3B,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;QAEpC,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,MAAM,IAAI,CAAC,kBAAkB,CAAC,WAAW,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QACvE,OAAO;KACR;IAED,MAAM,IAAI,CAAC,qBAAqB,CAAC,WAAW,CAAC,CAAC;AAChD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,qCAAuD;AACvD,6DAA+C;AAC/C,6CAA+B;AAS/B,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GAAG,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;IAC5F,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CAAC,CAAC;IAChB,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,CAAC;IACb,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YAC7G,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,EAAE,CAAC;QAE9C,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,yBAAgB,CAAC,CAAC;QACrE,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEjC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;QAC3B,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;QAEpC,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;QACxE,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,C
AAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
|
||||
57
lib/cli.js
generated
Normal file
57
lib/cli.js
generated
Normal file
@@ -0,0 +1,57 @@
|
||||
"use strict";
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const commander_1 = require("commander");
|
||||
const path = __importStar(require("path"));
|
||||
const logging_1 = require("./logging");
|
||||
const repository_1 = require("./repository");
|
||||
const upload_lib = __importStar(require("./upload-lib"));
|
||||
const program = new commander_1.Command();
|
||||
program.version('0.0.1');
|
||||
function parseGithubApiUrl(inputUrl) {
|
||||
try {
|
||||
const url = new URL(inputUrl);
|
||||
// If we detect this is trying to be to github.com
|
||||
// then return with a fixed canonical URL.
|
||||
if (url.hostname === 'github.com' || url.hostname === 'api.github.com') {
|
||||
return 'https://api.github.com';
|
||||
}
|
||||
// Add the API path if it's not already present.
|
||||
if (url.pathname.indexOf('/api/v3') === -1) {
|
||||
url.pathname = path.join(url.pathname, 'api', 'v3');
|
||||
}
|
||||
return url.toString();
|
||||
}
|
||||
catch (e) {
|
||||
throw new Error(`"${inputUrl}" is not a valid URL`);
|
||||
}
|
||||
}
|
||||
const logger = logging_1.getCLILogger();
|
||||
program
|
||||
.command('upload')
|
||||
.description('Uploads a SARIF file, or all SARIF files from a directory, to code scanning')
|
||||
.requiredOption('--sarif-file <file>', 'SARIF file to upload; can also be a directory for uploading multiple')
|
||||
.requiredOption('--repository <repository>', 'Repository name')
|
||||
.requiredOption('--commit <commit>', 'SHA of commit that was analyzed')
|
||||
.requiredOption('--ref <ref>', 'Name of ref that was analyzed')
|
||||
.requiredOption('--github-url <url>', 'URL of GitHub instance')
|
||||
.requiredOption('--github-auth <auth>', 'GitHub Apps token, or of the form "username:token" if using a personal access token')
|
||||
.option('--checkout-path <path>', 'Checkout path (default: current working directory)')
|
||||
.action(async (cmd) => {
|
||||
try {
|
||||
await upload_lib.upload(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, cmd.ref, undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, cmd.githubAuth, parseGithubApiUrl(cmd.githubUrl), 'cli', logger);
|
||||
}
|
||||
catch (e) {
|
||||
logger.error('Upload failed');
|
||||
logger.error(e);
|
||||
process.exitCode = 1;
|
||||
}
|
||||
});
|
||||
program.parse(process.argv);
|
||||
//# sourceMappingURL=cli.js.map
|
||||
1
lib/cli.js.map
Normal file
1
lib/cli.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"cli.js","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":";;;;;;;;;AAAA,yCAAoC;AACpC,2CAA6B;AAE7B,uCAAyC;AACzC,6CAAkD;AAClD,yDAA2C;AAE3C,MAAM,OAAO,GAAG,IAAI,mBAAO,EAAE,CAAC;AAC9B,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AAYzB,SAAS,iBAAiB,CAAC,QAAgB;IACzC,IAAI;QACF,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC;QAE9B,kDAAkD;QAClD,0CAA0C;QAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;YACtE,OAAO,wBAAwB,CAAC;SACjC;QAED,gDAAgD;QAChD,IAAI,GAAG,CAAC,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE;YAC1C,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;SACrD;QAED,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;KAEvB;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,IAAI,KAAK,CAAC,IAAI,QAAQ,sBAAsB,CAAC,CAAC;KACrD;AACH,CAAC;AAED,MAAM,MAAM,GAAG,sBAAY,EAAE,CAAC;AAE9B,OAAO;KACJ,OAAO,CAAC,QAAQ,CAAC;KACjB,WAAW,CAAC,6EAA6E,CAAC;KAC1F,cAAc,CAAC,qBAAqB,EAAE,sEAAsE,CAAC;KAC7G,cAAc,CAAC,2BAA2B,EAAE,iBAAiB,CAAC;KAC9D,cAAc,CAAC,mBAAmB,EAAE,iCAAiC,CAAC;KACtE,cAAc,CAAC,aAAa,EAAE,+BAA+B,CAAC;KAC9D,cAAc,CAAC,oBAAoB,EAAE,wBAAwB,CAAC;KAC9D,cAAc,CAAC,sBAAsB,EAAE,qFAAqF,CAAC;KAC7H,MAAM,CAAC,wBAAwB,EAAE,oDAAoD,CAAC;KACtF,MAAM,CAAC,KAAK,EAAE,GAAe,EAAE,EAAE;IAChC,IAAI;QACF,MAAM,UAAU,CAAC,MAAM,CACrB,GAAG,CAAC,SAAS,EACb,+BAAkB,CAAC,GAAG,CAAC,UAAU,CAAC,EAClC,GAAG,CAAC,MAAM,EACV,GAAG,CAAC,GAAG,EACP,SAAS,EACT,SAAS,EACT,SAAS,EACT,GAAG,CAAC,YAAY,IAAI,OAAO,CAAC,GAAG,EAAE,EACjC,SAAS,EACT,GAAG,CAAC,UAAU,EACd,iBAAiB,CAAC,GAAG,CAAC,SAAS,CAAC,EAChC,KAAK,EACL,MAAM,CAAC,CAAC;KACX;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC;QAC9B,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QAChB,OAAO,CAAC,QAAQ,GAAG,CAAC,CAAC;KACtB;AACH,CAAC,CAAC,CAAC;AAEL,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC"}
|
||||
225
lib/codeql.js
generated
225
lib/codeql.js
generated
@@ -6,29 +6,132 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const exec = __importStar(require("@actions/exec"));
|
||||
const http = __importStar(require("@actions/http-client"));
|
||||
const toolcache = __importStar(require("@actions/tool-cache"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const semver = __importStar(require("semver"));
|
||||
const stream = __importStar(require("stream"));
|
||||
const globalutil = __importStar(require("util"));
|
||||
const v4_1 = __importDefault(require("uuid/v4"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
|
||||
const util = __importStar(require("./util"));
|
||||
/**
|
||||
* Stores the CodeQL object, and is populated by `setupCodeQL` or `getCodeQL`.
|
||||
* Can be overridden in tests using `setCodeQL`.
|
||||
*/
|
||||
let cachedCodeQL = undefined;
|
||||
/**
|
||||
* Environment variable used to store the location of the CodeQL CLI executable.
|
||||
* Value is set by setupCodeQL and read by getCodeQL.
|
||||
*/
|
||||
const CODEQL_ACTION_CMD = "CODEQL_ACTION_CMD";
|
||||
const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
|
||||
const CODEQL_BUNDLE_NAME = "codeql-bundle.tar.gz";
|
||||
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
|
||||
function getCodeQLActionRepository() {
|
||||
// Actions do not know their own repository name,
|
||||
// so we currently use this hack to find the name based on where our files are.
|
||||
// This can be removed once the change to the runner in https://github.com/actions/runner/pull/585 is deployed.
|
||||
const runnerTemp = util.getRequiredEnvParam("RUNNER_TEMP");
|
||||
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
|
||||
const relativeScriptPath = path.relative(actionsDirectory, __filename);
|
||||
// This handles the case where the Action does not come from an Action repository,
|
||||
// e.g. our integration tests which use the Action code from the current checkout.
|
||||
if (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath)) {
|
||||
return CODEQL_DEFAULT_ACTION_REPOSITORY;
|
||||
}
|
||||
const relativeScriptPathParts = relativeScriptPath.split(path.sep);
|
||||
return relativeScriptPathParts[0] + "/" + relativeScriptPathParts[1];
|
||||
}
|
||||
async function getCodeQLBundleDownloadURL() {
|
||||
const codeQLActionRepository = getCodeQLActionRepository();
|
||||
const potentialDownloadSources = [
|
||||
// This GitHub instance, and this Action.
|
||||
[util.getInstanceAPIURL(), codeQLActionRepository],
|
||||
// This GitHub instance, and the canonical Action.
|
||||
[util.getInstanceAPIURL(), CODEQL_DEFAULT_ACTION_REPOSITORY],
|
||||
// GitHub.com, and the canonical Action.
|
||||
[util.GITHUB_DOTCOM_API_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
|
||||
];
|
||||
// We now filter out any duplicates.
|
||||
// Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
|
||||
const uniqueDownloadSources = potentialDownloadSources.filter((url, index, self) => index === self.indexOf(url));
|
||||
for (let downloadSource of uniqueDownloadSources) {
|
||||
let [apiURL, repository] = downloadSource;
|
||||
// If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
|
||||
if (apiURL === util.GITHUB_DOTCOM_API_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
|
||||
break;
|
||||
}
|
||||
let [repositoryOwner, repositoryName] = repository.split("/");
|
||||
try {
|
||||
const release = await api.getActionsApiClient().repos.getReleaseByTag({
|
||||
owner: repositoryOwner,
|
||||
repo: repositoryName,
|
||||
tag: CODEQL_BUNDLE_VERSION
|
||||
});
|
||||
for (let asset of release.data.assets) {
|
||||
if (asset.name === CODEQL_BUNDLE_NAME) {
|
||||
core.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
|
||||
return asset.url;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
core.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
|
||||
}
|
||||
}
|
||||
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${CODEQL_BUNDLE_NAME}`;
|
||||
}
|
||||
// We have to download CodeQL manually because the toolcache doesn't support Accept headers.
|
||||
// This can be removed once https://github.com/actions/toolkit/pull/530 is merged and released.
|
||||
async function toolcacheDownloadTool(url, headers) {
|
||||
const client = new http.HttpClient('CodeQL Action');
|
||||
const dest = path.join(util.getRequiredEnvParam('RUNNER_TEMP'), v4_1.default());
|
||||
const response = await client.get(url, headers);
|
||||
if (response.message.statusCode !== 200) {
|
||||
const err = new toolcache.HTTPError(response.message.statusCode);
|
||||
core.info(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
|
||||
throw err;
|
||||
}
|
||||
const pipeline = globalutil.promisify(stream.pipeline);
|
||||
fs.mkdirSync(path.dirname(dest), { recursive: true });
|
||||
await pipeline(response.message, fs.createWriteStream(dest));
|
||||
return dest;
|
||||
}
|
||||
async function setupCodeQL() {
|
||||
try {
|
||||
const codeqlURL = core.getInput('tools', { required: true });
|
||||
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL);
|
||||
let codeqlURL = core.getInput('tools');
|
||||
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
|
||||
let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
|
||||
if (codeqlFolder) {
|
||||
core.debug(`CodeQL found in cache ${codeqlFolder}`);
|
||||
}
|
||||
else {
|
||||
const codeqlPath = await toolcache.downloadTool(codeqlURL);
|
||||
if (!codeqlURL) {
|
||||
codeqlURL = await getCodeQLBundleDownloadURL();
|
||||
}
|
||||
const headers = { accept: 'application/octet-stream' };
|
||||
// We only want to provide an authorization header if we are downloading
|
||||
// from the same GitHub instance the Action is running on.
|
||||
// This avoids leaking Enterprise tokens to dotcom.
|
||||
if (codeqlURL.startsWith(util.getInstanceAPIURL() + "/")) {
|
||||
core.debug('Downloading CodeQL bundle with token.');
|
||||
let token = core.getInput('token', { required: true });
|
||||
headers.authorization = `token ${token}`;
|
||||
}
|
||||
else {
|
||||
core.debug('Downloading CodeQL bundle without token.');
|
||||
}
|
||||
let codeqlPath = await toolcacheDownloadTool(codeqlURL, headers);
|
||||
core.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
|
||||
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
|
||||
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
|
||||
}
|
||||
@@ -37,10 +140,11 @@ async function setupCodeQL() {
|
||||
codeqlCmd += ".exe";
|
||||
}
|
||||
else if (process.platform !== 'linux' && process.platform !== 'darwin') {
|
||||
throw new Error("Unsupported plaform: " + process.platform);
|
||||
throw new Error("Unsupported platform: " + process.platform);
|
||||
}
|
||||
cachedCodeQL = getCodeQLForCmd(codeqlCmd);
|
||||
core.exportVariable(CODEQL_ACTION_CMD, codeqlCmd);
|
||||
return getCodeQLForCmd(codeqlCmd);
|
||||
return cachedCodeQL;
|
||||
}
|
||||
catch (e) {
|
||||
core.error(e);
|
||||
@@ -66,10 +170,42 @@ function getCodeQLURLVersion(url) {
|
||||
}
|
||||
exports.getCodeQLURLVersion = getCodeQLURLVersion;
|
||||
function getCodeQL() {
|
||||
const codeqlCmd = util.getRequiredEnvParam(CODEQL_ACTION_CMD);
|
||||
return getCodeQLForCmd(codeqlCmd);
|
||||
if (cachedCodeQL === undefined) {
|
||||
const codeqlCmd = util.getRequiredEnvParam(CODEQL_ACTION_CMD);
|
||||
cachedCodeQL = getCodeQLForCmd(codeqlCmd);
|
||||
}
|
||||
return cachedCodeQL;
|
||||
}
|
||||
exports.getCodeQL = getCodeQL;
|
||||
function resolveFunction(partialCodeql, methodName) {
|
||||
if (typeof partialCodeql[methodName] !== 'function') {
|
||||
const dummyMethod = () => {
|
||||
throw new Error('CodeQL ' + methodName + ' method not correctly defined');
|
||||
};
|
||||
return dummyMethod;
|
||||
}
|
||||
return partialCodeql[methodName];
|
||||
}
|
||||
/**
|
||||
* Set the functionality for CodeQL methods. Only for use in tests.
|
||||
*
|
||||
* Accepts a partial object and any undefined methods will be implemented
|
||||
* to immediately throw an exception indicating which method is missing.
|
||||
*/
|
||||
function setCodeQL(partialCodeql) {
|
||||
cachedCodeQL = {
|
||||
getDir: resolveFunction(partialCodeql, 'getDir'),
|
||||
printVersion: resolveFunction(partialCodeql, 'printVersion'),
|
||||
getTracerEnv: resolveFunction(partialCodeql, 'getTracerEnv'),
|
||||
databaseInit: resolveFunction(partialCodeql, 'databaseInit'),
|
||||
runAutobuild: resolveFunction(partialCodeql, 'runAutobuild'),
|
||||
extractScannedLanguage: resolveFunction(partialCodeql, 'extractScannedLanguage'),
|
||||
finalizeDatabase: resolveFunction(partialCodeql, 'finalizeDatabase'),
|
||||
resolveQueries: resolveFunction(partialCodeql, 'resolveQueries'),
|
||||
databaseAnalyze: resolveFunction(partialCodeql, 'databaseAnalyze')
|
||||
};
|
||||
}
|
||||
exports.setCodeQL = setCodeQL;
|
||||
function getCodeQLForCmd(cmd) {
|
||||
return {
|
||||
getDir: function () {
|
||||
@@ -89,6 +225,7 @@ function getCodeQLForCmd(cmd) {
|
||||
'trace-command',
|
||||
databasePath,
|
||||
...compilerSpecArg,
|
||||
...getExtraOptionsFromEnv(['database', 'trace-command']),
|
||||
process.execPath,
|
||||
path.resolve(__dirname, 'tracer-env.js'),
|
||||
envFile
|
||||
@@ -102,6 +239,7 @@ function getCodeQLForCmd(cmd) {
|
||||
databasePath,
|
||||
'--language=' + language,
|
||||
'--source-root=' + sourceRoot,
|
||||
...getExtraOptionsFromEnv(['database', 'init']),
|
||||
]);
|
||||
},
|
||||
runAutobuild: async function (language) {
|
||||
@@ -123,7 +261,8 @@ function getCodeQLForCmd(cmd) {
|
||||
'resolve',
|
||||
'extractor',
|
||||
'--format=json',
|
||||
'--language=' + language
|
||||
'--language=' + language,
|
||||
...getExtraOptionsFromEnv(['resolve', 'extractor']),
|
||||
], {
|
||||
silent: true,
|
||||
listeners: {
|
||||
@@ -138,6 +277,7 @@ function getCodeQLForCmd(cmd) {
|
||||
await exec.exec(cmd, [
|
||||
'database',
|
||||
'trace-command',
|
||||
...getExtraOptionsFromEnv(['database', 'trace-command']),
|
||||
databasePath,
|
||||
'--',
|
||||
traceCommand
|
||||
@@ -147,17 +287,23 @@ function getCodeQLForCmd(cmd) {
|
||||
await exec.exec(cmd, [
|
||||
'database',
|
||||
'finalize',
|
||||
...getExtraOptionsFromEnv(['database', 'finalize']),
|
||||
databasePath
|
||||
]);
|
||||
},
|
||||
resolveQueries: async function (queries) {
|
||||
let output = '';
|
||||
await exec.exec(cmd, [
|
||||
resolveQueries: async function (queries, extraSearchPath) {
|
||||
const codeqlArgs = [
|
||||
'resolve',
|
||||
'queries',
|
||||
...queries,
|
||||
'--format=bylanguage'
|
||||
], {
|
||||
'--format=bylanguage',
|
||||
...getExtraOptionsFromEnv(['resolve', 'queries'])
|
||||
];
|
||||
if (extraSearchPath !== undefined) {
|
||||
codeqlArgs.push('--search-path', extraSearchPath);
|
||||
}
|
||||
let output = '';
|
||||
await exec.exec(cmd, codeqlArgs, {
|
||||
listeners: {
|
||||
stdout: (data) => {
|
||||
output += data.toString();
|
||||
@@ -176,9 +322,62 @@ function getCodeQLForCmd(cmd) {
|
||||
'--format=sarif-latest',
|
||||
'--output=' + sarifFile,
|
||||
'--no-sarif-add-snippets',
|
||||
...getExtraOptionsFromEnv(['database', 'analyze']),
|
||||
querySuite
|
||||
]);
|
||||
}
|
||||
};
|
||||
}
|
||||
function isTracedLanguage(language) {
|
||||
return ['cpp', 'java', 'csharp'].includes(language);
|
||||
}
|
||||
exports.isTracedLanguage = isTracedLanguage;
|
||||
function isScannedLanguage(language) {
|
||||
return !isTracedLanguage(language);
|
||||
}
|
||||
exports.isScannedLanguage = isScannedLanguage;
|
||||
/**
|
||||
* Gets the options for `path` of `options` as an array of extra option strings.
|
||||
*/
|
||||
function getExtraOptionsFromEnv(path) {
|
||||
let options = util.getExtraOptionsEnvParam();
|
||||
return getExtraOptions(options, path, []);
|
||||
}
|
||||
/**
|
||||
* Gets the options for `path` of `options` as an array of extra option strings.
|
||||
*
|
||||
* - the special terminal step name '*' in `options` matches all path steps
|
||||
* - throws an exception if this conversion is impossible.
|
||||
*/
|
||||
function getExtraOptions(options, path, pathInfo) {
|
||||
var _a, _b, _c;
|
||||
/**
|
||||
* Gets `options` as an array of extra option strings.
|
||||
*
|
||||
* - throws an exception mentioning `pathInfo` if this conversion is impossible.
|
||||
*/
|
||||
function asExtraOptions(options, pathInfo) {
|
||||
if (options === undefined) {
|
||||
return [];
|
||||
}
|
||||
if (!Array.isArray(options)) {
|
||||
const msg = `The extra options for '${pathInfo.join('.')}' ('${JSON.stringify(options)}') are not in an array.`;
|
||||
throw new Error(msg);
|
||||
}
|
||||
return options.map(o => {
|
||||
const t = typeof o;
|
||||
if (t !== 'string' && t !== 'number' && t !== 'boolean') {
|
||||
const msg = `The extra option for '${pathInfo.join('.')}' ('${JSON.stringify(o)}') is not a primitive value.`;
|
||||
throw new Error(msg);
|
||||
}
|
||||
return o + '';
|
||||
});
|
||||
}
|
||||
let all = asExtraOptions((_a = options) === null || _a === void 0 ? void 0 : _a['*'], pathInfo.concat('*'));
|
||||
let specific = path.length === 0 ?
|
||||
asExtraOptions(options, pathInfo) :
|
||||
getExtraOptions((_b = options) === null || _b === void 0 ? void 0 : _b[path[0]], (_c = path) === null || _c === void 0 ? void 0 : _c.slice(1), pathInfo.concat(path[0]));
|
||||
return all.concat(specific);
|
||||
}
|
||||
exports.getExtraOptions = getExtraOptions;
|
||||
//# sourceMappingURL=codeql.js.map
|
||||
File diff suppressed because one or more lines are too long
22
lib/codeql.test.js
generated
22
lib/codeql.test.js
generated
@@ -57,4 +57,26 @@ ava_1.default('parse codeql bundle url version', t => {
|
||||
}
|
||||
}
|
||||
});
|
||||
ava_1.default('getExtraOptions works for explicit paths', t => {
|
||||
t.deepEqual(codeql.getExtraOptions({}, ['foo'], []), []);
|
||||
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ['foo'], []), ['42']);
|
||||
t.deepEqual(codeql.getExtraOptions({ foo: { bar: [42] } }, ['foo', 'bar'], []), ['42']);
|
||||
});
|
||||
ava_1.default('getExtraOptions works for wildcards', t => {
|
||||
t.deepEqual(codeql.getExtraOptions({ '*': [42] }, ['foo'], []), ['42']);
|
||||
});
|
||||
ava_1.default('getExtraOptions works for wildcards and explicit paths', t => {
|
||||
let o1 = { '*': [42], foo: [87] };
|
||||
t.deepEqual(codeql.getExtraOptions(o1, ['foo'], []), ['42', '87']);
|
||||
let o2 = { '*': [42], foo: [87] };
|
||||
t.deepEqual(codeql.getExtraOptions(o2, ['foo', 'bar'], []), ['42']);
|
||||
let o3 = { '*': [42], foo: { '*': [87], bar: [99] } };
|
||||
let p = ['foo', 'bar'];
|
||||
t.deepEqual(codeql.getExtraOptions(o3, p, []), ['42', '87', '99']);
|
||||
});
|
||||
ava_1.default('getExtraOptions throws for bad content', t => {
|
||||
t.throws(() => codeql.getExtraOptions({ '*': 42 }, ['foo'], []));
|
||||
t.throws(() => codeql.getExtraOptions({ foo: 87 }, ['foo'], []));
|
||||
t.throws(() => codeql.getExtraOptions({ '*': [42], foo: { '*': 87, bar: [99] } }, ['foo', 'bar'], []));
|
||||
});
|
||||
//# sourceMappingURL=codeql.test.js.map
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"codeql.test.js","sourceRoot":"","sources":["../src/codeql.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,iDAAmC;AACnC,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAEnC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QAEzC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAE9D,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAGrF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,8CAA8C,OAAO,uBAAuB,CAAC;YAE1G,MAAM,MAAM,CAAC,WAAW,EAAE,CAAC;YAE3B,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,MAAM,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC;YACtD,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"codeql.test.js","sourceRoot":"","sources":["../src/codeql.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,iDAAmC;AACnC,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAEnC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QAEzC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAE9D,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAGrF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,8CAA8C,OAAO,uBAAuB,CAAC;YAE1G,MAAM,MAAM,CAAC,WAAW,EAAE,CAAC;YAE3B,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,MAAM,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC;YACtD,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,0CAA0C,EAAE,CAAC,CAAC,EAAE;IACnD,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;IAEzD,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,C
AAC,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;IAEtE,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAC,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;AACtF,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,qCAAqC,EAAE,CAAC,CAAC,EAAE;IAC9C,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;AACxE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,wDAAwD,EAAE,CAAC,CAAC,EAAE;IACjE,IAAI,EAAE,GAAG,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,CAAC;IAChC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;IAEnE,IAAI,EAAE,GAAG,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,CAAC;IAChC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;IAEpE,IAAI,EAAE,GAAG,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAC,CAAC;IACnD,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;IACvB,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;AACrE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,wCAAwC,EAAE,CAAC,CAAC,EAAE;IACjD,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,EAAE,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IAE/D,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,EAAE,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IAE/D,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,EAAE,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAC,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;AACtG,CAAC,CAAC,CAAC"}
|
||||
509
lib/config-utils.js
generated
509
lib/config-utils.js
generated
@@ -8,103 +8,179 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const io = __importStar(require("@actions/io"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const path = __importStar(require("path"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const codeql_1 = require("./codeql");
|
||||
const externalQueries = __importStar(require("./external-queries"));
|
||||
const util = __importStar(require("./util"));
|
||||
// Property names from the user-supplied config file.
|
||||
const NAME_PROPERTY = 'name';
|
||||
const DISPLAY_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
|
||||
const DISABLE_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
|
||||
const QUERIES_PROPERTY = 'queries';
|
||||
const QUERIES_USES_PROPERTY = 'uses';
|
||||
const PATHS_IGNORE_PROPERTY = 'paths-ignore';
|
||||
const PATHS_PROPERTY = 'paths';
|
||||
class ExternalQuery {
|
||||
constructor(repository, ref) {
|
||||
this.path = '';
|
||||
this.repository = repository;
|
||||
this.ref = ref;
|
||||
// All the languages supported by CodeQL
|
||||
const ALL_LANGUAGES = ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'];
|
||||
// Some alternate names for languages
|
||||
const LANGUAGE_ALIASES = {
|
||||
'c': 'cpp',
|
||||
'typescript': 'javascript',
|
||||
};
|
||||
/**
|
||||
* A list of queries from https://github.com/github/codeql that
|
||||
* we don't want to run. Disabling them here is a quicker alternative to
|
||||
* disabling them in the code scanning query suites. Queries should also
|
||||
* be disabled in the suites, and removed from this list here once the
|
||||
* bundle is updated to make those suite changes live.
|
||||
*
|
||||
* Format is a map from language to an array of path suffixes of .ql files.
|
||||
*/
|
||||
const DISABLED_BUILTIN_QUERIES = {
|
||||
'csharp': [
|
||||
'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
|
||||
'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
|
||||
]
|
||||
};
|
||||
function queryIsDisabled(language, query) {
|
||||
return (DISABLED_BUILTIN_QUERIES[language] || [])
|
||||
.some(disabledQuery => query.endsWith(disabledQuery));
|
||||
}
|
||||
/**
|
||||
* Asserts that the noDeclaredLanguage and multipleDeclaredLanguages fields are
|
||||
* both empty and errors if they are not.
|
||||
*/
|
||||
function validateQueries(resolvedQueries) {
|
||||
const noDeclaredLanguage = resolvedQueries.noDeclaredLanguage;
|
||||
const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
|
||||
if (noDeclaredLanguageQueries.length !== 0) {
|
||||
throw new Error('The following queries do not declare a language. ' +
|
||||
'Their qlpack.yml files are either missing or is invalid.\n' +
|
||||
noDeclaredLanguageQueries.join('\n'));
|
||||
}
|
||||
const multipleDeclaredLanguages = resolvedQueries.multipleDeclaredLanguages;
|
||||
const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
|
||||
if (multipleDeclaredLanguagesQueries.length !== 0) {
|
||||
throw new Error('The following queries declare multiple languages. ' +
|
||||
'Their qlpack.yml files are either missing or is invalid.\n' +
|
||||
multipleDeclaredLanguagesQueries.join('\n'));
|
||||
}
|
||||
}
|
||||
exports.ExternalQuery = ExternalQuery;
|
||||
/**
|
||||
* Run 'codeql resolve queries' and add the results to resultMap
|
||||
*/
|
||||
async function runResolveQueries(resultMap, toResolve, extraSearchPath, errorOnInvalidQueries) {
|
||||
const codeQl = codeql_1.getCodeQL();
|
||||
const resolvedQueries = await codeQl.resolveQueries(toResolve, extraSearchPath);
|
||||
for (const [language, queries] of Object.entries(resolvedQueries.byLanguage)) {
|
||||
if (resultMap[language] === undefined) {
|
||||
resultMap[language] = [];
|
||||
}
|
||||
resultMap[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
|
||||
}
|
||||
if (errorOnInvalidQueries) {
|
||||
validateQueries(resolvedQueries);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Get the set of queries included by default.
|
||||
*/
|
||||
async function addDefaultQueries(languages, resultMap) {
|
||||
const suites = languages.map(l => l + '-code-scanning.qls');
|
||||
await runResolveQueries(resultMap, suites, undefined, false);
|
||||
}
|
||||
// The set of acceptable values for built-in suites from the codeql bundle
|
||||
const builtinSuites = ['security-extended', 'security-and-quality'];
|
||||
class Config {
|
||||
constructor() {
|
||||
this.name = "";
|
||||
this.disableDefaultQueries = false;
|
||||
this.additionalQueries = [];
|
||||
this.externalQueries = [];
|
||||
this.additionalSuites = [];
|
||||
this.pathsIgnore = [];
|
||||
this.paths = [];
|
||||
}
|
||||
addQuery(configFile, queryUses) {
|
||||
// The logic for parsing the string is based on what actions does for
|
||||
// parsing the 'uses' actions in the workflow file
|
||||
queryUses = queryUses.trim();
|
||||
if (queryUses === "") {
|
||||
throw new Error(getQueryUsesInvalid(configFile));
|
||||
}
|
||||
// Check for the local path case before we start trying to parse the repository name
|
||||
if (queryUses.startsWith("./")) {
|
||||
const localQueryPath = queryUses.slice(2);
|
||||
// Resolve the local path against the workspace so that when this is
|
||||
// passed to codeql it resolves to exactly the path we expect it to resolve to.
|
||||
const workspacePath = fs.realpathSync(util.getRequiredEnvParam('GITHUB_WORKSPACE'));
|
||||
let absoluteQueryPath = path.join(workspacePath, localQueryPath);
|
||||
// Check the file exists
|
||||
if (!fs.existsSync(absoluteQueryPath)) {
|
||||
throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
|
||||
}
|
||||
// Call this after checking file exists, because it'll fail if file doesn't exist
|
||||
absoluteQueryPath = fs.realpathSync(absoluteQueryPath);
|
||||
// Check the local path doesn't jump outside the repo using '..' or symlinks
|
||||
if (!(absoluteQueryPath + path.sep).startsWith(workspacePath + path.sep)) {
|
||||
throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
|
||||
}
|
||||
this.additionalQueries.push(absoluteQueryPath);
|
||||
return;
|
||||
}
|
||||
// Check for one of the builtin suites
|
||||
if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
|
||||
const suite = builtinSuites.find((suite) => suite === queryUses);
|
||||
if (suite) {
|
||||
this.additionalSuites.push(suite);
|
||||
return;
|
||||
}
|
||||
else {
|
||||
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
||||
}
|
||||
}
|
||||
let tok = queryUses.split('@');
|
||||
if (tok.length !== 2) {
|
||||
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
||||
}
|
||||
const ref = tok[1];
|
||||
tok = tok[0].split('/');
|
||||
// The first token is the owner
|
||||
// The second token is the repo
|
||||
// The rest is a path, if there is more than one token combine them to form the full path
|
||||
if (tok.length < 2) {
|
||||
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
||||
}
|
||||
if (tok.length > 3) {
|
||||
tok = [tok[0], tok[1], tok.slice(2).join('/')];
|
||||
}
|
||||
// Check none of the parts of the repository name are empty
|
||||
if (tok[0].trim() === '' || tok[1].trim() === '') {
|
||||
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
||||
}
|
||||
let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
|
||||
if (tok.length === 3) {
|
||||
external.path = tok[2];
|
||||
}
|
||||
this.externalQueries.push(external);
|
||||
/**
|
||||
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
|
||||
* Throws an error if suiteName is not a valid builtin suite.
|
||||
*/
|
||||
async function addBuiltinSuiteQueries(configFile, languages, resultMap, suiteName) {
|
||||
const suite = builtinSuites.find((suite) => suite === suiteName);
|
||||
if (!suite) {
|
||||
throw new Error(getQueryUsesInvalid(configFile, suiteName));
|
||||
}
|
||||
const suites = languages.map(l => l + '-' + suiteName + '.qls');
|
||||
await runResolveQueries(resultMap, suites, undefined, false);
|
||||
}
|
||||
/**
|
||||
* Retrieve the set of queries at localQueryPath and add them to resultMap.
|
||||
*/
|
||||
async function addLocalQueries(configFile, resultMap, localQueryPath) {
|
||||
// Resolve the local path against the workspace so that when this is
|
||||
// passed to codeql it resolves to exactly the path we expect it to resolve to.
|
||||
const workspacePath = fs.realpathSync(util.getRequiredEnvParam('GITHUB_WORKSPACE'));
|
||||
let absoluteQueryPath = path.join(workspacePath, localQueryPath);
|
||||
// Check the file exists
|
||||
if (!fs.existsSync(absoluteQueryPath)) {
|
||||
throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
|
||||
}
|
||||
// Call this after checking file exists, because it'll fail if file doesn't exist
|
||||
absoluteQueryPath = fs.realpathSync(absoluteQueryPath);
|
||||
// Check the local path doesn't jump outside the repo using '..' or symlinks
|
||||
if (!(absoluteQueryPath + path.sep).startsWith(workspacePath + path.sep)) {
|
||||
throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
|
||||
}
|
||||
// Get the root of the current repo to use when resolving query dependencies
|
||||
const rootOfRepo = util.getRequiredEnvParam('GITHUB_WORKSPACE');
|
||||
await runResolveQueries(resultMap, [absoluteQueryPath], rootOfRepo, true);
|
||||
}
|
||||
/**
|
||||
* Retrieve the set of queries at the referenced remote repo and add them to resultMap.
|
||||
*/
|
||||
async function addRemoteQueries(configFile, resultMap, queryUses) {
|
||||
let tok = queryUses.split('@');
|
||||
if (tok.length !== 2) {
|
||||
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
||||
}
|
||||
const ref = tok[1];
|
||||
tok = tok[0].split('/');
|
||||
// The first token is the owner
|
||||
// The second token is the repo
|
||||
// The rest is a path, if there is more than one token combine them to form the full path
|
||||
if (tok.length < 2) {
|
||||
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
||||
}
|
||||
// Check none of the parts of the repository name are empty
|
||||
if (tok[0].trim() === '' || tok[1].trim() === '') {
|
||||
throw new Error(getQueryUsesInvalid(configFile, queryUses));
|
||||
}
|
||||
const nwo = tok[0] + '/' + tok[1];
|
||||
// Checkout the external repository
|
||||
const rootOfRepo = await externalQueries.checkoutExternalRepository(nwo, ref);
|
||||
const queryPath = tok.length > 2
|
||||
? path.join(rootOfRepo, tok.slice(2).join('/'))
|
||||
: rootOfRepo;
|
||||
await runResolveQueries(resultMap, [queryPath], rootOfRepo, true);
|
||||
}
|
||||
/**
|
||||
* Parse a query 'uses' field to a discrete set of query files and update resultMap.
|
||||
*
|
||||
* The logic for parsing the string is based on what actions does for
|
||||
* parsing the 'uses' actions in the workflow file. So it can handle
|
||||
* local paths starting with './', or references to remote repos, or
|
||||
* a finite set of hardcoded terms for builtin suites.
|
||||
*/
|
||||
async function parseQueryUses(configFile, languages, resultMap, queryUses) {
|
||||
queryUses = queryUses.trim();
|
||||
if (queryUses === "") {
|
||||
throw new Error(getQueryUsesInvalid(configFile));
|
||||
}
|
||||
// Check for the local path case before we start trying to parse the repository name
|
||||
if (queryUses.startsWith("./")) {
|
||||
await addLocalQueries(configFile, resultMap, queryUses.slice(2));
|
||||
return;
|
||||
}
|
||||
// Check for one of the builtin suites
|
||||
if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
|
||||
await addBuiltinSuiteQueries(configFile, languages, resultMap, queryUses);
|
||||
return;
|
||||
}
|
||||
// Otherwise, must be a reference to another repo
|
||||
await addRemoteQueries(configFile, resultMap, queryUses);
|
||||
}
|
||||
exports.Config = Config;
|
||||
// Regex validating stars in paths or paths-ignore entries.
|
||||
// The intention is to only allow ** to appear when immediately
|
||||
// preceded and followed by a slash.
|
||||
@@ -156,7 +232,7 @@ function getNameInvalid(configFile) {
|
||||
}
|
||||
exports.getNameInvalid = getNameInvalid;
|
||||
function getDisableDefaultQueriesInvalid(configFile) {
|
||||
return getConfigFilePropertyError(configFile, DISPLAY_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
|
||||
return getConfigFilePropertyError(configFile, DISABLE_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
|
||||
}
|
||||
exports.getDisableDefaultQueriesInvalid = getDisableDefaultQueriesInvalid;
|
||||
function getQueriesInvalid(configFile) {
|
||||
@@ -199,25 +275,134 @@ function getConfigFileRepoFormatInvalidMessage(configFile) {
|
||||
return error;
|
||||
}
|
||||
exports.getConfigFileRepoFormatInvalidMessage = getConfigFileRepoFormatInvalidMessage;
|
||||
function getConfigFileFormatInvalidMessage(configFile) {
|
||||
return 'The configuration file "' + configFile + '" could not be read';
|
||||
function getConfigFileFormatInvalidMessage(configFile, reason) {
|
||||
return 'The configuration file "' + configFile + '" could not be read. Reason: ' + reason;
|
||||
}
|
||||
exports.getConfigFileFormatInvalidMessage = getConfigFileFormatInvalidMessage;
|
||||
function getConfigFileDirectoryGivenMessage(configFile) {
|
||||
return 'The configuration file "' + configFile + '" looks like a directory, not a file';
|
||||
}
|
||||
exports.getConfigFileDirectoryGivenMessage = getConfigFileDirectoryGivenMessage;
|
||||
function getConfigFilePropertyError(configFile, property, error) {
|
||||
return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
|
||||
}
|
||||
async function initConfig() {
|
||||
let configFile = core.getInput('config-file');
|
||||
const config = new Config();
|
||||
// If no config file was provided create an empty one
|
||||
if (configFile === '') {
|
||||
core.debug('No configuration file was provided');
|
||||
return config;
|
||||
function getNoLanguagesError() {
|
||||
return "Did not detect any languages to analyze. " +
|
||||
"Please update input in workflow or check that GitHub detects the correct languages in your repository.";
|
||||
}
|
||||
exports.getNoLanguagesError = getNoLanguagesError;
|
||||
function getUnknownLanguagesError(languages) {
|
||||
return "Did not recognise the following languages: " + languages.join(', ');
|
||||
}
|
||||
exports.getUnknownLanguagesError = getUnknownLanguagesError;
|
||||
/**
|
||||
* Gets the set of languages in the current repository
|
||||
*/
|
||||
async function getLanguagesInRepo() {
|
||||
var _a;
|
||||
// Translate between GitHub's API names for languages and ours
|
||||
const codeqlLanguages = {
|
||||
'C': 'cpp',
|
||||
'C++': 'cpp',
|
||||
'C#': 'csharp',
|
||||
'Go': 'go',
|
||||
'Java': 'java',
|
||||
'JavaScript': 'javascript',
|
||||
'TypeScript': 'javascript',
|
||||
'Python': 'python',
|
||||
};
|
||||
let repo_nwo = (_a = process.env['GITHUB_REPOSITORY']) === null || _a === void 0 ? void 0 : _a.split("/");
|
||||
if (repo_nwo) {
|
||||
let owner = repo_nwo[0];
|
||||
let repo = repo_nwo[1];
|
||||
core.debug(`GitHub repo ${owner} ${repo}`);
|
||||
const response = await api.getActionsApiClient(true).repos.listLanguages({
|
||||
owner,
|
||||
repo
|
||||
});
|
||||
core.debug("Languages API response: " + JSON.stringify(response));
|
||||
// The GitHub API is going to return languages in order of popularity,
|
||||
// When we pick a language to autobuild we want to pick the most popular traced language
|
||||
// Since sets in javascript maintain insertion order, using a set here and then splatting it
|
||||
// into an array gives us an array of languages ordered by popularity
|
||||
let languages = new Set();
|
||||
for (let lang in response.data) {
|
||||
if (lang in codeqlLanguages) {
|
||||
languages.add(codeqlLanguages[lang]);
|
||||
}
|
||||
}
|
||||
return [...languages];
|
||||
}
|
||||
else {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Get the languages to analyse.
|
||||
*
|
||||
* The result is obtained from the action input parameter 'languages' if that
|
||||
* has been set, otherwise it is deduced as all languages in the repo that
|
||||
* can be analysed.
|
||||
*
|
||||
* If no languages could be detected from either the workflow or the repository
|
||||
* then throw an error.
|
||||
*/
|
||||
async function getLanguages() {
|
||||
// Obtain from action input 'languages' if set
|
||||
let languages = core.getInput('languages', { required: false })
|
||||
.split(',')
|
||||
.map(x => x.trim())
|
||||
.filter(x => x.length > 0);
|
||||
core.info("Languages from configuration: " + JSON.stringify(languages));
|
||||
if (languages.length === 0) {
|
||||
// Obtain languages as all languages in the repo that can be analysed
|
||||
languages = await getLanguagesInRepo();
|
||||
core.info("Automatically detected languages: " + JSON.stringify(languages));
|
||||
}
|
||||
// If the languages parameter was not given and no languages were
|
||||
// detected then fail here as this is a workflow configuration error.
|
||||
if (languages.length === 0) {
|
||||
throw new Error(getNoLanguagesError());
|
||||
}
|
||||
// Make sure they are supported
|
||||
const checkedLanguages = [];
|
||||
const unknownLanguages = [];
|
||||
for (let language of languages) {
|
||||
// Normalise to lower case
|
||||
language = language.toLowerCase();
|
||||
// Resolve any known aliases
|
||||
if (language in LANGUAGE_ALIASES) {
|
||||
language = LANGUAGE_ALIASES[language];
|
||||
}
|
||||
const checkedLanguage = ALL_LANGUAGES.find(l => l === language);
|
||||
if (checkedLanguage === undefined) {
|
||||
unknownLanguages.push(language);
|
||||
}
|
||||
else if (checkedLanguages.indexOf(checkedLanguage) === -1) {
|
||||
checkedLanguages.push(checkedLanguage);
|
||||
}
|
||||
}
|
||||
if (unknownLanguages.length > 0) {
|
||||
throw new Error(getUnknownLanguagesError(unknownLanguages));
|
||||
}
|
||||
return checkedLanguages;
|
||||
}
|
||||
/**
|
||||
* Get the default config for when the user has not supplied one.
|
||||
*/
|
||||
async function getDefaultConfig() {
|
||||
const languages = await getLanguages();
|
||||
const queries = {};
|
||||
await addDefaultQueries(languages, queries);
|
||||
return {
|
||||
languages: languages,
|
||||
queries: queries,
|
||||
pathsIgnore: [],
|
||||
paths: [],
|
||||
originalUserInput: {},
|
||||
};
|
||||
}
|
||||
exports.getDefaultConfig = getDefaultConfig;
|
||||
/**
|
||||
* Load the config from the given file.
|
||||
*/
|
||||
async function loadConfig(configFile) {
|
||||
let parsedYAML;
|
||||
if (isLocal(configFile)) {
|
||||
// Treat the config file as relative to the workspace
|
||||
@@ -228,6 +413,8 @@ async function initConfig() {
|
||||
else {
|
||||
parsedYAML = await getRemoteConfig(configFile);
|
||||
}
|
||||
// Validate that the 'name' property is syntactically correct,
|
||||
// even though we don't use the value yet.
|
||||
if (NAME_PROPERTY in parsedYAML) {
|
||||
if (typeof parsedYAML[NAME_PROPERTY] !== "string") {
|
||||
throw new Error(getNameInvalid(configFile));
|
||||
@@ -235,24 +422,31 @@ async function initConfig() {
|
||||
if (parsedYAML[NAME_PROPERTY].length === 0) {
|
||||
throw new Error(getNameInvalid(configFile));
|
||||
}
|
||||
config.name = parsedYAML[NAME_PROPERTY];
|
||||
}
|
||||
if (DISPLAY_DEFAULT_QUERIES_PROPERTY in parsedYAML) {
|
||||
if (typeof parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
|
||||
const languages = await getLanguages();
|
||||
const queries = {};
|
||||
const pathsIgnore = [];
|
||||
const paths = [];
|
||||
let disableDefaultQueries = false;
|
||||
if (DISABLE_DEFAULT_QUERIES_PROPERTY in parsedYAML) {
|
||||
if (typeof parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
|
||||
throw new Error(getDisableDefaultQueriesInvalid(configFile));
|
||||
}
|
||||
config.disableDefaultQueries = parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY];
|
||||
disableDefaultQueries = parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY];
|
||||
}
|
||||
if (!disableDefaultQueries) {
|
||||
await addDefaultQueries(languages, queries);
|
||||
}
|
||||
if (QUERIES_PROPERTY in parsedYAML) {
|
||||
if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
|
||||
throw new Error(getQueriesInvalid(configFile));
|
||||
}
|
||||
parsedYAML[QUERIES_PROPERTY].forEach(query => {
|
||||
for (const query of parsedYAML[QUERIES_PROPERTY]) {
|
||||
if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
|
||||
throw new Error(getQueryUsesInvalid(configFile));
|
||||
}
|
||||
config.addQuery(configFile, query[QUERIES_USES_PROPERTY]);
|
||||
});
|
||||
await parseQueryUses(configFile, languages, queries, query[QUERIES_USES_PROPERTY]);
|
||||
}
|
||||
}
|
||||
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
|
||||
if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
|
||||
@@ -262,7 +456,7 @@ async function initConfig() {
|
||||
if (typeof path !== "string" || path === '') {
|
||||
throw new Error(getPathsIgnoreInvalid(configFile));
|
||||
}
|
||||
config.pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile));
|
||||
pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile));
|
||||
});
|
||||
}
|
||||
if (PATHS_PROPERTY in parsedYAML) {
|
||||
@@ -273,11 +467,47 @@ async function initConfig() {
|
||||
if (typeof path !== "string" || path === '') {
|
||||
throw new Error(getPathsInvalid(configFile));
|
||||
}
|
||||
config.paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile));
|
||||
paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile));
|
||||
});
|
||||
}
|
||||
// The list of queries should not be empty for any language. If it is then
|
||||
// it is a user configuration error.
|
||||
for (const language of languages) {
|
||||
if (queries[language] === undefined || queries[language].length === 0) {
|
||||
throw new Error(`Did not detect any queries to run for ${language}. ` +
|
||||
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
|
||||
}
|
||||
}
|
||||
return {
|
||||
languages,
|
||||
queries,
|
||||
pathsIgnore,
|
||||
paths,
|
||||
originalUserInput: parsedYAML
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Load and return the config.
|
||||
*
|
||||
* This will parse the config from the user input if present, or generate
|
||||
* a default config. The parsed config is then stored to a known location.
|
||||
*/
|
||||
async function initConfig() {
|
||||
const configFile = core.getInput('config-file');
|
||||
let config;
|
||||
// If no config file was provided create an empty one
|
||||
if (configFile === '') {
|
||||
core.debug('No configuration file was provided');
|
||||
config = await getDefaultConfig();
|
||||
}
|
||||
else {
|
||||
config = await loadConfig(configFile);
|
||||
}
|
||||
// Save the config so we can easily access it again in the future
|
||||
await saveConfig(config);
|
||||
return config;
|
||||
}
|
||||
exports.initConfig = initConfig;
|
||||
function isLocal(configPath) {
|
||||
// If the path starts with ./, look locally
|
||||
if (configPath.indexOf("./") === 0) {
|
||||
@@ -304,53 +534,50 @@ async function getRemoteConfig(configFile) {
|
||||
if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
|
||||
throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
|
||||
}
|
||||
const response = await api.getApiClient().repos.getContents({
|
||||
owner: pieces.groups.owner,
|
||||
repo: pieces.groups.repo,
|
||||
path: pieces.groups.path,
|
||||
ref: pieces.groups.ref,
|
||||
});
|
||||
let fileContents;
|
||||
if ("content" in response.data && response.data.content !== undefined) {
|
||||
fileContents = response.data.content;
|
||||
try {
|
||||
fileContents = await util.getFileContentsUsingAPI(pieces.groups.owner, pieces.groups.repo, pieces.groups.path, pieces.groups.ref);
|
||||
}
|
||||
else if (Array.isArray(response.data)) {
|
||||
throw new Error(getConfigFileDirectoryGivenMessage(configFile));
|
||||
catch (err) {
|
||||
throw new Error(getConfigFileFormatInvalidMessage(configFile, err.message));
|
||||
}
|
||||
else {
|
||||
throw new Error(getConfigFileFormatInvalidMessage(configFile));
|
||||
}
|
||||
return yaml.safeLoad(Buffer.from(fileContents, 'base64').toString('binary'));
|
||||
return yaml.safeLoad(fileContents);
|
||||
}
|
||||
function getConfigFolder() {
|
||||
return util.getRequiredEnvParam('RUNNER_TEMP');
|
||||
/**
|
||||
* Get the file path where the parsed config will be stored.
|
||||
*/
|
||||
function getPathToParsedConfigFile() {
|
||||
return path.join(util.getRequiredEnvParam('RUNNER_TEMP'), 'config');
|
||||
}
|
||||
function getConfigFile() {
|
||||
return path.join(getConfigFolder(), 'config');
|
||||
}
|
||||
exports.getConfigFile = getConfigFile;
|
||||
exports.getPathToParsedConfigFile = getPathToParsedConfigFile;
|
||||
/**
|
||||
* Store the given config to the path returned from getPathToParsedConfigFile.
|
||||
*/
|
||||
async function saveConfig(config) {
|
||||
const configString = JSON.stringify(config);
|
||||
await io.mkdirP(getConfigFolder());
|
||||
fs.writeFileSync(getConfigFile(), configString, 'utf8');
|
||||
const configFile = getPathToParsedConfigFile();
|
||||
fs.mkdirSync(path.dirname(configFile), { recursive: true });
|
||||
fs.writeFileSync(configFile, configString, 'utf8');
|
||||
core.debug('Saved config:');
|
||||
core.debug(configString);
|
||||
}
|
||||
async function loadConfig() {
|
||||
const configFile = getConfigFile();
|
||||
if (fs.existsSync(configFile)) {
|
||||
const configString = fs.readFileSync(configFile, 'utf8');
|
||||
core.debug('Loaded config:');
|
||||
core.debug(configString);
|
||||
return JSON.parse(configString);
|
||||
}
|
||||
else {
|
||||
const config = await initConfig();
|
||||
core.debug('Initialized config:');
|
||||
core.debug(JSON.stringify(config));
|
||||
await saveConfig(config);
|
||||
return config;
|
||||
/**
|
||||
* Get the config.
|
||||
*
|
||||
* If this is the first time in a workflow that this is being called then
|
||||
* this will parse the config from the user input. The parsed config is then
|
||||
* stored to a known location. On the second and further calls, this will
|
||||
* return the contents of the parsed config from the known location.
|
||||
*/
|
||||
async function getConfig() {
|
||||
const configFile = getPathToParsedConfigFile();
|
||||
if (!fs.existsSync(configFile)) {
|
||||
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
|
||||
}
|
||||
const configString = fs.readFileSync(configFile, 'utf8');
|
||||
core.debug('Loaded config:');
|
||||
core.debug(configString);
|
||||
return JSON.parse(configString);
|
||||
}
|
||||
exports.loadConfig = loadConfig;
|
||||
exports.getConfig = getConfig;
|
||||
//# sourceMappingURL=config-utils.js.map
|
||||
File diff suppressed because one or more lines are too long
270
lib/config-utils.test.js
generated
270
lib/config-utils.test.js
generated
@@ -16,10 +16,11 @@ const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const sinon_1 = __importDefault(require("sinon"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const CodeQL = __importStar(require("./codeql"));
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const testingUtils = __importStar(require("./testing-utils"));
|
||||
const util = __importStar(require("./util"));
|
||||
testing_utils_1.setupTests(ava_1.default);
|
||||
testingUtils.setupTests(ava_1.default);
|
||||
function setInput(name, value) {
|
||||
// Transformation copied from
|
||||
// https://github.com/actions/toolkit/blob/05e39f551d33e1688f61b209ab5cdd335198f1b8/packages/core/src/core.ts#L69
|
||||
@@ -31,37 +32,62 @@ function setInput(name, value) {
|
||||
delete process.env[envVar];
|
||||
}
|
||||
}
|
||||
function mockGetContents(content) {
|
||||
function mockListLanguages(languages) {
|
||||
// Passing an auth token is required, so we just use a dummy value
|
||||
let client = new github.GitHub('123');
|
||||
const response = {
|
||||
data: content
|
||||
data: {},
|
||||
};
|
||||
const spyGetContents = sinon_1.default.stub(client.repos, "getContents").resolves(response);
|
||||
for (const language of languages) {
|
||||
response.data[language] = 123;
|
||||
}
|
||||
sinon_1.default.stub(client.repos, "listLanguages").resolves(response);
|
||||
sinon_1.default.stub(api, "getApiClient").value(() => client);
|
||||
return spyGetContents;
|
||||
}
|
||||
ava_1.default("load empty config", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
setInput('config-file', undefined);
|
||||
const config = await configUtils.loadConfig();
|
||||
t.deepEqual(config, new configUtils.Config());
|
||||
setInput('languages', 'javascript,python');
|
||||
CodeQL.setCodeQL({
|
||||
resolveQueries: async function () {
|
||||
return {
|
||||
byLanguage: {},
|
||||
noDeclaredLanguage: {},
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
});
|
||||
const config = await configUtils.initConfig();
|
||||
t.deepEqual(config, await configUtils.getDefaultConfig());
|
||||
});
|
||||
});
|
||||
ava_1.default("loading config saves config", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
const configFile = configUtils.getConfigFile();
|
||||
setInput('config-file', undefined);
|
||||
setInput('languages', 'javascript,python');
|
||||
CodeQL.setCodeQL({
|
||||
resolveQueries: async function () {
|
||||
return {
|
||||
byLanguage: {},
|
||||
noDeclaredLanguage: {},
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
});
|
||||
// Sanity check the saved config file does not already exist
|
||||
t.false(fs.existsSync(configFile));
|
||||
const config = await configUtils.loadConfig();
|
||||
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile()));
|
||||
// Sanity check that getConfig throws before we have called initConfig
|
||||
await t.throwsAsync(configUtils.getConfig);
|
||||
const config1 = await configUtils.initConfig();
|
||||
// The saved config file should now exist
|
||||
t.true(fs.existsSync(configFile));
|
||||
// And the contents should parse correctly to the config that was returned
|
||||
t.deepEqual(fs.readFileSync(configFile, 'utf8'), JSON.stringify(config));
|
||||
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile()));
|
||||
// And that same newly-initialised config should now be returned by getConfig
|
||||
const config2 = await configUtils.getConfig();
|
||||
t.deepEqual(config1, config2);
|
||||
});
|
||||
});
|
||||
ava_1.default("load input outside of workspace", async (t) => {
|
||||
@@ -70,8 +96,8 @@ ava_1.default("load input outside of workspace", async (t) => {
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
setInput('config-file', '../input');
|
||||
try {
|
||||
await configUtils.loadConfig();
|
||||
throw new Error('loadConfig did not throw error');
|
||||
await configUtils.initConfig();
|
||||
throw new Error('initConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
|
||||
@@ -85,8 +111,8 @@ ava_1.default("load non-local input with invalid repo syntax", async (t) => {
|
||||
// no filename given, just a repo
|
||||
setInput('config-file', 'octo-org/codeql-config@main');
|
||||
try {
|
||||
await configUtils.loadConfig();
|
||||
throw new Error('loadConfig did not throw error');
|
||||
await configUtils.initConfig();
|
||||
throw new Error('initConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileRepoFormatInvalidMessage('octo-org/codeql-config@main')));
|
||||
@@ -99,9 +125,10 @@ ava_1.default("load non-existent input", async (t) => {
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
t.false(fs.existsSync(path.join(tmpDir, 'input')));
|
||||
setInput('config-file', 'input');
|
||||
setInput('languages', 'javascript');
|
||||
try {
|
||||
await configUtils.loadConfig();
|
||||
throw new Error('loadConfig did not throw error');
|
||||
await configUtils.initConfig();
|
||||
throw new Error('initConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
|
||||
@@ -112,10 +139,115 @@ ava_1.default("load non-empty input", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
CodeQL.setCodeQL({
|
||||
resolveQueries: async function () {
|
||||
return {
|
||||
byLanguage: {
|
||||
'javascript': {
|
||||
'/foo/a.ql': {},
|
||||
'/bar/b.ql': {},
|
||||
},
|
||||
},
|
||||
noDeclaredLanguage: {},
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
});
|
||||
// Just create a generic config object with non-default values for all fields
|
||||
const inputFileContents = `
|
||||
name: my config
|
||||
disable-default-queries: true
|
||||
queries:
|
||||
- uses: ./foo
|
||||
paths-ignore:
|
||||
- a
|
||||
- b
|
||||
paths:
|
||||
- c/d`;
|
||||
fs.mkdirSync(path.join(tmpDir, 'foo'));
|
||||
// And the config we expect it to parse to
|
||||
const expectedConfig = {
|
||||
languages: ['javascript'],
|
||||
queries: { 'javascript': ['/foo/a.ql', '/bar/b.ql'] },
|
||||
pathsIgnore: ['a', 'b'],
|
||||
paths: ['c/d'],
|
||||
originalUserInput: {
|
||||
name: 'my config',
|
||||
'disable-default-queries': true,
|
||||
queries: [{ uses: './foo' }],
|
||||
'paths-ignore': ['a', 'b'],
|
||||
paths: ['c/d'],
|
||||
},
|
||||
};
|
||||
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
|
||||
setInput('config-file', 'input');
|
||||
setInput('languages', 'javascript');
|
||||
const actualConfig = await configUtils.initConfig();
|
||||
// Should exactly equal the object we constructed earlier
|
||||
t.deepEqual(actualConfig, expectedConfig);
|
||||
});
|
||||
});
|
||||
ava_1.default("default queries are used", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
// Check that the default behaviour is to add the default queries.
|
||||
// In this case if a config file is specified but does not include
|
||||
// the disable-default-queries field.
|
||||
// We determine this by whether CodeQL.resolveQueries is called
|
||||
// with the correct arguments.
|
||||
const resolveQueriesArgs = [];
|
||||
CodeQL.setCodeQL({
|
||||
resolveQueries: async function (queries, extraSearchPath) {
|
||||
resolveQueriesArgs.push({ queries, extraSearchPath });
|
||||
return {
|
||||
byLanguage: {
|
||||
'javascript': {
|
||||
'foo.ql': {},
|
||||
},
|
||||
},
|
||||
noDeclaredLanguage: {},
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
});
|
||||
// The important point of this config is that it doesn't specify
|
||||
// the disable-default-queries field.
|
||||
// Any other details are hopefully irrelevant for this tetst.
|
||||
const inputFileContents = `
|
||||
paths:
|
||||
- foo`;
|
||||
fs.mkdirSync(path.join(tmpDir, 'foo'));
|
||||
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
|
||||
setInput('config-file', 'input');
|
||||
setInput('languages', 'javascript');
|
||||
await configUtils.initConfig();
|
||||
// Check resolve queries was called correctly
|
||||
t.deepEqual(resolveQueriesArgs.length, 1);
|
||||
t.deepEqual(resolveQueriesArgs[0].queries, ['javascript-code-scanning.qls']);
|
||||
t.deepEqual(resolveQueriesArgs[0].extraSearchPath, undefined);
|
||||
});
|
||||
});
|
||||
ava_1.default("API client used when reading remote config", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
CodeQL.setCodeQL({
|
||||
resolveQueries: async function () {
|
||||
return {
|
||||
byLanguage: {
|
||||
'javascript': {
|
||||
'foo.ql': {},
|
||||
},
|
||||
},
|
||||
noDeclaredLanguage: {},
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
});
|
||||
const inputFileContents = `
|
||||
name: my config
|
||||
disable-default-queries: true
|
||||
queries:
|
||||
- uses: ./
|
||||
- uses: ./foo
|
||||
@@ -123,45 +255,17 @@ ava_1.default("load non-empty input", async (t) => {
|
||||
paths-ignore:
|
||||
- a
|
||||
- b
|
||||
paths:
|
||||
- c/d`;
|
||||
fs.mkdirSync(path.join(tmpDir, 'foo'));
|
||||
// And the config we expect it to parse to
|
||||
const expectedConfig = new configUtils.Config();
|
||||
expectedConfig.name = 'my config';
|
||||
expectedConfig.disableDefaultQueries = true;
|
||||
expectedConfig.additionalQueries.push(fs.realpathSync(tmpDir));
|
||||
expectedConfig.additionalQueries.push(fs.realpathSync(path.join(tmpDir, 'foo')));
|
||||
expectedConfig.externalQueries = [new configUtils.ExternalQuery('foo/bar', 'dev')];
|
||||
expectedConfig.pathsIgnore = ['a', 'b'];
|
||||
expectedConfig.paths = ['c/d'];
|
||||
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
|
||||
setInput('config-file', 'input');
|
||||
const actualConfig = await configUtils.loadConfig();
|
||||
// Should exactly equal the object we constructed earlier
|
||||
t.deepEqual(actualConfig, expectedConfig);
|
||||
});
|
||||
});
|
||||
ava_1.default("API client used when reading remote config", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
const inputFileContents = `
|
||||
name: my config
|
||||
disable-default-queries: true
|
||||
queries:
|
||||
- uses: ./
|
||||
paths-ignore:
|
||||
- a
|
||||
- b
|
||||
paths:
|
||||
- c/d`;
|
||||
const dummyResponse = {
|
||||
content: Buffer.from(inputFileContents).toString("base64"),
|
||||
};
|
||||
const spyGetContents = mockGetContents(dummyResponse);
|
||||
const spyGetContents = testingUtils.mockGetContents(dummyResponse, 200);
|
||||
// Create checkout directory for remote queries repository
|
||||
fs.mkdirSync(path.join(tmpDir, 'foo/bar'), { recursive: true });
|
||||
setInput('config-file', 'octo-org/codeql-config/config.yaml@main');
|
||||
await configUtils.loadConfig();
|
||||
setInput('languages', 'javascript');
|
||||
await configUtils.initConfig();
|
||||
t.assert(spyGetContents.called);
|
||||
});
|
||||
});
|
||||
@@ -170,15 +274,16 @@ ava_1.default("Remote config handles the case where a directory is provided", as
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
const dummyResponse = []; // directories are returned as arrays
|
||||
mockGetContents(dummyResponse);
|
||||
testingUtils.mockGetContents(dummyResponse, 200);
|
||||
const repoReference = 'octo-org/codeql-config/config.yaml@main';
|
||||
setInput('config-file', repoReference);
|
||||
try {
|
||||
await configUtils.loadConfig();
|
||||
throw new Error('loadConfig did not throw error');
|
||||
await configUtils.initConfig();
|
||||
throw new Error('initConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileDirectoryGivenMessage(repoReference)));
|
||||
const reason = util.fileIsADirectoryError(repoReference);
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileFormatInvalidMessage(repoReference, reason)));
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -189,15 +294,44 @@ ava_1.default("Invalid format of remote config handled correctly", async (t) =>
|
||||
const dummyResponse = {
|
||||
// note no "content" property here
|
||||
};
|
||||
mockGetContents(dummyResponse);
|
||||
testingUtils.mockGetContents(dummyResponse, 400);
|
||||
const repoReference = 'octo-org/codeql-config/config.yaml@main';
|
||||
setInput('config-file', repoReference);
|
||||
try {
|
||||
await configUtils.loadConfig();
|
||||
throw new Error('loadConfig did not throw error');
|
||||
await configUtils.initConfig();
|
||||
throw new Error('initConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileFormatInvalidMessage(repoReference)));
|
||||
const reason = util.fileDownloadError(repoReference);
|
||||
t.deepEqual(err, new Error(configUtils.getConfigFileFormatInvalidMessage(repoReference, reason)));
|
||||
}
|
||||
});
|
||||
});
|
||||
ava_1.default("No detected languages", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
mockListLanguages([]);
|
||||
try {
|
||||
await configUtils.initConfig();
|
||||
throw new Error('initConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getNoLanguagesError()));
|
||||
}
|
||||
});
|
||||
});
|
||||
ava_1.default("Unknown languages", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
setInput('languages', 'ruby,english');
|
||||
try {
|
||||
await configUtils.initConfig();
|
||||
throw new Error('initConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(configUtils.getUnknownLanguagesError(['ruby', 'english'])));
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -206,12 +340,22 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env['RUNNER_TEMP'] = tmpDir;
|
||||
process.env['GITHUB_WORKSPACE'] = tmpDir;
|
||||
CodeQL.setCodeQL({
|
||||
resolveQueries: async function () {
|
||||
return {
|
||||
byLanguage: {},
|
||||
noDeclaredLanguage: {},
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
});
|
||||
const inputFile = path.join(tmpDir, 'input');
|
||||
fs.writeFileSync(inputFile, inputFileContents, 'utf8');
|
||||
setInput('config-file', 'input');
|
||||
setInput('languages', 'javascript');
|
||||
try {
|
||||
await configUtils.loadConfig();
|
||||
throw new Error('loadConfig did not throw error');
|
||||
await configUtils.initConfig();
|
||||
throw new Error('initConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));
|
||||
|
||||
File diff suppressed because one or more lines are too long
3
lib/defaults.json
Normal file
3
lib/defaults.json
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"bundleVersion": "codeql-bundle-20200630"
|
||||
}
|
||||
31
lib/external-queries.js
generated
31
lib/external-queries.js
generated
@@ -12,22 +12,23 @@ const exec = __importStar(require("@actions/exec"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const util = __importStar(require("./util"));
|
||||
async function checkoutExternalQueries(config) {
|
||||
/**
|
||||
* Check out repository at the given ref, and return the directory of the checkout.
|
||||
*/
|
||||
async function checkoutExternalRepository(repository, ref) {
|
||||
const folder = util.getRequiredEnvParam('RUNNER_TEMP');
|
||||
for (const externalQuery of config.externalQueries) {
|
||||
core.info('Checking out ' + externalQuery.repository);
|
||||
const checkoutLocation = path.join(folder, externalQuery.repository);
|
||||
if (!fs.existsSync(checkoutLocation)) {
|
||||
const repoURL = 'https://github.com/' + externalQuery.repository + '.git';
|
||||
await exec.exec('git', ['clone', repoURL, checkoutLocation]);
|
||||
await exec.exec('git', [
|
||||
'--work-tree=' + checkoutLocation,
|
||||
'--git-dir=' + checkoutLocation + '/.git',
|
||||
'checkout', externalQuery.ref,
|
||||
]);
|
||||
}
|
||||
config.additionalQueries.push(path.join(checkoutLocation, externalQuery.path));
|
||||
core.info('Checking out ' + repository);
|
||||
const checkoutLocation = path.join(folder, repository);
|
||||
if (!fs.existsSync(checkoutLocation)) {
|
||||
const repoURL = 'https://github.com/' + repository + '.git';
|
||||
await exec.exec('git', ['clone', repoURL, checkoutLocation]);
|
||||
await exec.exec('git', [
|
||||
'--work-tree=' + checkoutLocation,
|
||||
'--git-dir=' + checkoutLocation + '/.git',
|
||||
'checkout', ref,
|
||||
]);
|
||||
}
|
||||
return checkoutLocation;
|
||||
}
|
||||
exports.checkoutExternalQueries = checkoutExternalQueries;
|
||||
exports.checkoutExternalRepository = checkoutExternalRepository;
|
||||
//# sourceMappingURL=external-queries.js.map
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAG7B,6CAA+B;AAExB,KAAK,UAAU,uBAAuB,CAAC,MAA0B;IACtE,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC;IAEvD,KAAK,MAAM,aAAa,IAAI,MAAM,CAAC,eAAe,EAAE;QAClD,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,UAAU,CAAC,CAAC;QAEtD,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,aAAa,CAAC,UAAU,CAAC,CAAC;QACrE,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;YACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,aAAa,CAAC,UAAU,GAAG,MAAM,CAAC;YAC1E,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;YAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;gBACrB,cAAc,GAAG,gBAAgB;gBACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;gBACzC,UAAU,EAAE,aAAa,CAAC,GAAG;aAC9B,CAAC,CAAC;SACJ;QAED,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,gBAAgB,EAAE,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC;KAChF;AACH,CAAC;AAnBD,0DAmBC"}
|
||||
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAE7B,6CAA+B;AAE/B;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAAC,UAAkB,EAAE,GAAW;IAC9E,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC;IAEvD,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,UAAU,CAAC,CAAC;IAExC,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;IACvD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,UAAU,GAAG,MAAM,CAAC;QAC5D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;QAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;YACrB,cAAc,GAAG,gBAAgB;YACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;YACzC,UAAU,EAAE,GAAG;SAChB,CAAC,CAAC;KACJ;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAjBD,gEAiBC"}
|
||||
9
lib/external-queries.test.js
generated
9
lib/external-queries.test.js
generated
@@ -13,20 +13,15 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const externalQueries = __importStar(require("./external-queries"));
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util = __importStar(require("./util"));
|
||||
testing_utils_1.setupTests(ava_1.default);
|
||||
ava_1.default("checkoutExternalQueries", async (t) => {
|
||||
let config = new configUtils.Config();
|
||||
config.externalQueries = [
|
||||
new configUtils.ExternalQuery("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b"),
|
||||
];
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
process.env["RUNNER_TEMP"] = tmpDir;
|
||||
await externalQueries.checkoutExternalQueries(config);
|
||||
// COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in master
|
||||
await externalQueries.checkoutExternalRepository("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b");
|
||||
// COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in the default branch
|
||||
t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT")));
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,4DAA8C;AAC9C,oEAAsD;AACtD,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACxC,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,eAAe,GAAG;QACvB,IAAI,WAAW,CAAC,aAAa,CAAC,kBAAkB,EAAE,0CAA0C,CAAC;KAC9F,CAAC;IAEF,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACnC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QACpC,MAAM,eAAe,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtD,uFAAuF;QACvF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IAC/E,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,oEAAsD;AACtD,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACxC,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACnC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QACpC,MAAM,eAAe,CAAC,0BAA0B,CAAC,kBAAkB,EAAE,0CAA0C,CAAC,CAAC;QAEjH,mGAAmG;QACnG,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IAC/E,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||
155
lib/finalize-db.js
generated
155
lib/finalize-db.js
generated
@@ -8,148 +8,105 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const io = __importStar(require("@actions/io"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const codeql_1 = require("./codeql");
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const externalQueries = __importStar(require("./external-queries"));
|
||||
const logging_1 = require("./logging");
|
||||
const repository_1 = require("./repository");
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
const upload_lib = __importStar(require("./upload-lib"));
|
||||
const util = __importStar(require("./util"));
|
||||
/**
|
||||
* A list of queries from https://github.com/github/codeql that
|
||||
* we don't want to run. Disabling them here is a quicker alternative to
|
||||
* disabling them in the code scanning query suites. Queries should also
|
||||
* be disabled in the suites, and removed from this list here once the
|
||||
* bundle is updated to make those suite changes live.
|
||||
*
|
||||
* Format is a map from language to an array of path suffixes of .ql files.
|
||||
*/
|
||||
const DISABLED_BUILTIN_QUERIES = {
|
||||
'csharp': [
|
||||
'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
|
||||
'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
|
||||
]
|
||||
};
|
||||
function queryIsDisabled(language, query) {
|
||||
return (DISABLED_BUILTIN_QUERIES[language] || [])
|
||||
.some(disabledQuery => query.endsWith(disabledQuery));
|
||||
async function sendStatusReport(startedAt, queriesStats, uploadStats, error) {
|
||||
var _a, _b, _c;
|
||||
const status = ((_a = queriesStats) === null || _a === void 0 ? void 0 : _a.analyze_failure_language) !== undefined || error !== undefined ? 'failure' : 'success';
|
||||
const statusReportBase = await util.createStatusReportBase('finish', status, startedAt, (_b = error) === null || _b === void 0 ? void 0 : _b.message, (_c = error) === null || _c === void 0 ? void 0 : _c.stack);
|
||||
const statusReport = {
|
||||
...statusReportBase,
|
||||
...(queriesStats || {}),
|
||||
...(uploadStats || {}),
|
||||
};
|
||||
await util.sendStatusReport(statusReport);
|
||||
}
|
||||
async function createdDBForScannedLanguages(databaseFolder) {
|
||||
const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
|
||||
if (scannedLanguages) {
|
||||
const codeql = codeql_1.getCodeQL();
|
||||
for (const language of scannedLanguages.split(',')) {
|
||||
async function createdDBForScannedLanguages(databaseFolder, config) {
|
||||
const codeql = codeql_1.getCodeQL();
|
||||
for (const language of config.languages) {
|
||||
if (codeql_1.isScannedLanguage(language)) {
|
||||
core.startGroup('Extracting ' + language);
|
||||
await codeql.extractScannedLanguage(path.join(databaseFolder, language), language);
|
||||
core.endGroup();
|
||||
}
|
||||
}
|
||||
}
|
||||
async function finalizeDatabaseCreation(databaseFolder) {
|
||||
await createdDBForScannedLanguages(databaseFolder);
|
||||
const languages = process.env[sharedEnv.CODEQL_ACTION_LANGUAGES] || '';
|
||||
async function finalizeDatabaseCreation(databaseFolder, config) {
|
||||
await createdDBForScannedLanguages(databaseFolder, config);
|
||||
const codeql = codeql_1.getCodeQL();
|
||||
for (const language of languages.split(',')) {
|
||||
for (const language of config.languages) {
|
||||
core.startGroup('Finalizing ' + language);
|
||||
await codeql.finalizeDatabase(path.join(databaseFolder, language));
|
||||
core.endGroup();
|
||||
}
|
||||
}
|
||||
async function resolveQueryLanguages(config) {
|
||||
let res = new Map();
|
||||
const codeql = codeql_1.getCodeQL();
|
||||
if (!config.disableDefaultQueries || config.additionalSuites.length !== 0) {
|
||||
const suites = [];
|
||||
for (const language of await util.getLanguages()) {
|
||||
if (!config.disableDefaultQueries) {
|
||||
suites.push(language + '-code-scanning.qls');
|
||||
}
|
||||
for (const additionalSuite of config.additionalSuites) {
|
||||
suites.push(language + '-' + additionalSuite + '.qls');
|
||||
}
|
||||
}
|
||||
const resolveQueriesOutputObject = await codeql.resolveQueries(suites);
|
||||
for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
|
||||
if (res[language] === undefined) {
|
||||
res[language] = [];
|
||||
}
|
||||
res[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
|
||||
}
|
||||
}
|
||||
if (config.additionalQueries.length !== 0) {
|
||||
const resolveQueriesOutputObject = await codeql.resolveQueries(config.additionalQueries);
|
||||
for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
|
||||
if (res[language] === undefined) {
|
||||
res[language] = [];
|
||||
}
|
||||
res[language].push(...Object.keys(queries));
|
||||
}
|
||||
const noDeclaredLanguage = resolveQueriesOutputObject.noDeclaredLanguage;
|
||||
const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
|
||||
if (noDeclaredLanguageQueries.length !== 0) {
|
||||
throw new Error('Some queries do not declare a language, their qlpack.yml file is missing or is invalid');
|
||||
}
|
||||
const multipleDeclaredLanguages = resolveQueriesOutputObject.multipleDeclaredLanguages;
|
||||
const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
|
||||
if (multipleDeclaredLanguagesQueries.length !== 0) {
|
||||
throw new Error('Some queries declare multiple languages, their qlpack.yml file is missing or is invalid');
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
// Runs queries and creates sarif files in the given folder
|
||||
async function runQueries(databaseFolder, sarifFolder, config) {
|
||||
const queriesPerLanguage = await resolveQueryLanguages(config);
|
||||
const codeql = codeql_1.getCodeQL();
|
||||
for (let database of fs.readdirSync(databaseFolder)) {
|
||||
core.startGroup('Analyzing ' + database);
|
||||
const queries = queriesPerLanguage[database] || [];
|
||||
for (let language of fs.readdirSync(databaseFolder)) {
|
||||
core.startGroup('Analyzing ' + language);
|
||||
const queries = config.queries[language] || [];
|
||||
if (queries.length === 0) {
|
||||
throw new Error('Unable to analyse ' + database + ' as no queries were selected for this language');
|
||||
throw new Error('Unable to analyse ' + language + ' as no queries were selected for this language');
|
||||
}
|
||||
try {
|
||||
// Pass the queries to codeql using a file instead of using the command
|
||||
// line to avoid command line length restrictions, particularly on windows.
|
||||
const querySuite = path.join(databaseFolder, language + '-queries.qls');
|
||||
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
|
||||
fs.writeFileSync(querySuite, querySuiteContents);
|
||||
core.debug('Query suite file for ' + language + '...\n' + querySuiteContents);
|
||||
const sarifFile = path.join(sarifFolder, language + '.sarif');
|
||||
await codeql.databaseAnalyze(path.join(databaseFolder, language), sarifFile, querySuite);
|
||||
core.debug('SARIF results for database ' + language + ' created at "' + sarifFile + '"');
|
||||
core.endGroup();
|
||||
}
|
||||
catch (e) {
|
||||
// For now the fields about query performance are not populated
|
||||
return {
|
||||
analyze_failure_language: language,
|
||||
};
|
||||
}
|
||||
// Pass the queries to codeql using a file instead of using the command
|
||||
// line to avoid command line length restrictions, particularly on windows.
|
||||
const querySuite = path.join(databaseFolder, database + '-queries.qls');
|
||||
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
|
||||
fs.writeFileSync(querySuite, querySuiteContents);
|
||||
core.debug('Query suite file for ' + database + '...\n' + querySuiteContents);
|
||||
const sarifFile = path.join(sarifFolder, database + '.sarif');
|
||||
await codeql.databaseAnalyze(path.join(databaseFolder, database), sarifFile, querySuite);
|
||||
core.debug('SARIF results for database ' + database + ' created at "' + sarifFile + '"');
|
||||
core.endGroup();
|
||||
}
|
||||
return {};
|
||||
}
|
||||
async function run() {
|
||||
const startedAt = new Date();
|
||||
let queriesStats = undefined;
|
||||
let uploadStats = undefined;
|
||||
try {
|
||||
if (util.should_abort('finish', true) || !await util.reportActionStarting('finish')) {
|
||||
util.prepareLocalRunEnvironment();
|
||||
if (!await util.sendStatusReport(await util.createStatusReportBase('finish', 'starting', startedAt), true)) {
|
||||
return;
|
||||
}
|
||||
const config = await configUtils.loadConfig();
|
||||
const config = await configUtils.getConfig();
|
||||
core.exportVariable(sharedEnv.ODASA_TRACER_CONFIGURATION, '');
|
||||
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
|
||||
const databaseFolder = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_DATABASE_DIR);
|
||||
const databaseFolder = util.getCodeQLDatabasesDir();
|
||||
const sarifFolder = core.getInput('output');
|
||||
await io.mkdirP(sarifFolder);
|
||||
fs.mkdirSync(sarifFolder, { recursive: true });
|
||||
core.info('Finalizing database creation');
|
||||
await finalizeDatabaseCreation(databaseFolder);
|
||||
await externalQueries.checkoutExternalQueries(config);
|
||||
await finalizeDatabaseCreation(databaseFolder, config);
|
||||
core.info('Analyzing database');
|
||||
await runQueries(databaseFolder, sarifFolder, config);
|
||||
queriesStats = await runQueries(databaseFolder, sarifFolder, config);
|
||||
if ('true' === core.getInput('upload')) {
|
||||
if (!await upload_lib.upload(sarifFolder)) {
|
||||
await util.reportActionFailed('finish', 'upload');
|
||||
return;
|
||||
}
|
||||
uploadStats = await upload_lib.upload(sarifFolder, repository_1.parseRepositoryNwo(util.getRequiredEnvParam('GITHUB_REPOSITORY')), await util.getCommitOid(), util.getRef(), await util.getAnalysisKey(), util.getRequiredEnvParam('GITHUB_WORKFLOW'), util.getWorkflowRunID(), core.getInput('checkout_path'), core.getInput('matrix'), core.getInput('token'), util.getRequiredEnvParam('GITHUB_API_URL'), 'actions', logging_1.getActionsLogger());
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
core.setFailed(error.message);
|
||||
await util.reportActionFailed('finish', error.message, error.stack);
|
||||
console.log(error);
|
||||
await sendStatusReport(startedAt, queriesStats, uploadStats, error);
|
||||
return;
|
||||
}
|
||||
await util.reportActionSucceeded('finish');
|
||||
await sendStatusReport(startedAt, queriesStats, uploadStats);
|
||||
}
|
||||
run().catch(e => {
|
||||
core.setFailed("analyze action failed: " + e);
|
||||
|
||||
File diff suppressed because one or more lines are too long
25
lib/fingerprints.js
generated
25
lib/fingerprints.js
generated
@@ -146,10 +146,10 @@ function locationUpdateCallback(result, location) {
|
||||
result.partialFingerprints.primaryLocationLineHash = hash;
|
||||
}
|
||||
else if (existingFingerprint !== hash) {
|
||||
core.warning("Calculated fingerprint of " + hash +
|
||||
" for file " + location.physicalLocation.artifactLocation.uri +
|
||||
" line " + lineNumber +
|
||||
", but found existing inconsistent fingerprint value " + existingFingerprint);
|
||||
core.warning('Calculated fingerprint of ' + hash +
|
||||
' for file ' + location.physicalLocation.artifactLocation.uri +
|
||||
' line ' + lineNumber +
|
||||
', but found existing inconsistent fingerprint value ' + existingFingerprint);
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -164,14 +164,14 @@ function resolveUriToFile(location, artifacts) {
|
||||
location.index < 0 ||
|
||||
location.index >= artifacts.length ||
|
||||
typeof artifacts[location.index].location !== 'object') {
|
||||
core.debug('Ignoring location as index "' + location.index + '" is invalid');
|
||||
core.debug(`Ignoring location as URI "${location.index}" is invalid`);
|
||||
return undefined;
|
||||
}
|
||||
location = artifacts[location.index].location;
|
||||
}
|
||||
// Get the URI and decode
|
||||
if (typeof location.uri !== 'string') {
|
||||
core.debug('Ignoring location as uri "' + location.uri + '" is invalid');
|
||||
core.debug(`Ignoring location as index "${location.uri}" is invalid`);
|
||||
return undefined;
|
||||
}
|
||||
let uri = decodeURIComponent(location.uri);
|
||||
@@ -181,13 +181,13 @@ function resolveUriToFile(location, artifacts) {
|
||||
uri = uri.substring(fileUriPrefix.length);
|
||||
}
|
||||
if (uri.indexOf('://') !== -1) {
|
||||
core.debug('Ignoring location URI "' + uri + "' as the scheme is not recognised");
|
||||
core.debug(`Ignoring location URI "${uri}" as the scheme is not recognised`);
|
||||
return undefined;
|
||||
}
|
||||
// Discard any absolute paths that aren't in the src root
|
||||
const srcRootPrefix = process.env['GITHUB_WORKSPACE'] + '/';
|
||||
if (uri.startsWith('/') && !uri.startsWith(srcRootPrefix)) {
|
||||
core.debug('Ignoring location URI "' + uri + "' as it is outside of the src root");
|
||||
core.debug(`Ignoring location URI "${uri}" as it is outside of the src root`);
|
||||
return undefined;
|
||||
}
|
||||
// Just assume a relative path is relative to the src root.
|
||||
@@ -198,7 +198,7 @@ function resolveUriToFile(location, artifacts) {
|
||||
}
|
||||
// Check the file exists
|
||||
if (!fs.existsSync(uri)) {
|
||||
core.debug("Unable to compute fingerprint for non-existent file: " + uri);
|
||||
core.debug(`Unable to compute fingerprint for non-existent file: ${uri}`);
|
||||
return undefined;
|
||||
}
|
||||
return uri;
|
||||
@@ -207,6 +207,7 @@ exports.resolveUriToFile = resolveUriToFile;
|
||||
// Compute fingerprints for results in the given sarif file
|
||||
// and return an updated sarif file contents.
|
||||
function addFingerprints(sarifContents) {
|
||||
var _a, _b;
|
||||
let sarif = JSON.parse(sarifContents);
|
||||
// Gather together results for the same file and construct
|
||||
// callbacks to accept hashes for that file and update the location
|
||||
@@ -217,10 +218,8 @@ function addFingerprints(sarifContents) {
|
||||
for (const result of run.results || []) {
|
||||
// Check the primary location is defined correctly and is in the src root
|
||||
const primaryLocation = (result.locations || [])[0];
|
||||
if (!primaryLocation ||
|
||||
!primaryLocation.physicalLocation ||
|
||||
!primaryLocation.physicalLocation.artifactLocation) {
|
||||
core.debug("Unable to compute fingerprint for invalid location: " + JSON.stringify(primaryLocation));
|
||||
if (!((_b = (_a = primaryLocation) === null || _a === void 0 ? void 0 : _a.physicalLocation) === null || _b === void 0 ? void 0 : _b.artifactLocation)) {
|
||||
core.debug(`Unable to compute fingerprint for invalid location: ${JSON.stringify(primaryLocation)}`);
|
||||
continue;
|
||||
}
|
||||
const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts);
|
||||
|
||||
File diff suppressed because one or more lines are too long
26
lib/logging.js
generated
Normal file
26
lib/logging.js
generated
Normal file
@@ -0,0 +1,26 @@
|
||||
"use strict";
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = __importStar(require("@actions/core"));
|
||||
function getActionsLogger() {
|
||||
return core;
|
||||
}
|
||||
exports.getActionsLogger = getActionsLogger;
|
||||
function getCLILogger() {
|
||||
return {
|
||||
debug: console.debug,
|
||||
info: console.info,
|
||||
warning: console.warn,
|
||||
error: console.error,
|
||||
startGroup: () => undefined,
|
||||
endGroup: () => undefined,
|
||||
};
|
||||
}
|
||||
exports.getCLILogger = getCLILogger;
|
||||
//# sourceMappingURL=logging.js.map
|
||||
1
lib/logging.js.map
Normal file
1
lib/logging.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"logging.js","sourceRoot":"","sources":["../src/logging.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAYtC,SAAgB,gBAAgB;IAC9B,OAAO,IAAI,CAAC;AACd,CAAC;AAFD,4CAEC;AAED,SAAgB,YAAY;IAC1B,OAAO;QACL,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,OAAO,EAAE,OAAO,CAAC,IAAI;QACrB,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AATD,oCASC"}
|
||||
14
lib/repository.js
generated
Normal file
14
lib/repository.js
generated
Normal file
@@ -0,0 +1,14 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
function parseRepositoryNwo(input) {
|
||||
const parts = input.split('/');
|
||||
if (parts.length !== 2) {
|
||||
throw new Error(`"${input}" is not a valid repository name`);
|
||||
}
|
||||
return {
|
||||
owner: parts[0],
|
||||
repo: parts[1],
|
||||
};
|
||||
}
|
||||
exports.parseRepositoryNwo = parseRepositoryNwo;
|
||||
//# sourceMappingURL=repository.js.map
|
||||
1
lib/repository.js.map
Normal file
1
lib/repository.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"repository.js","sourceRoot":"","sources":["../src/repository.ts"],"names":[],"mappings":";;AAMA,SAAgB,kBAAkB,CAAC,KAAa;IAC9C,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QACtB,MAAM,IAAI,KAAK,CAAC,IAAI,KAAK,kCAAkC,CAAC,CAAC;KAC9D;IACD,OAAO;QACL,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC;QACf,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC;KACf,CAAC;AACJ,CAAC;AATD,gDASC"}
|
||||
82
lib/setup-tracer.js
generated
82
lib/setup-tracer.js
generated
@@ -9,13 +9,11 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const exec = __importStar(require("@actions/exec"));
|
||||
const io = __importStar(require("@actions/io"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const analysisPaths = __importStar(require("./analysis-paths"));
|
||||
const codeql_1 = require("./codeql");
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
const util = __importStar(require("./util"));
|
||||
const CRITICAL_TRACER_VARS = new Set(['SEMMLE_PRELOAD_libtrace',
|
||||
,
|
||||
@@ -58,7 +56,7 @@ function concatTracerConfigs(configs) {
|
||||
const env = {};
|
||||
let copyExecutables = false;
|
||||
let envSize = 0;
|
||||
for (let v of Object.values(configs)) {
|
||||
for (const v of configs) {
|
||||
for (let e of Object.entries(v.env)) {
|
||||
const name = e[0];
|
||||
const value = e[1];
|
||||
@@ -121,34 +119,51 @@ function concatTracerConfigs(configs) {
|
||||
fs.writeFileSync(envPath, buffer);
|
||||
return { env, spec };
|
||||
}
|
||||
async function sendSuccessStatusReport(startedAt, config) {
|
||||
const statusReportBase = await util.createStatusReportBase('init', 'success', startedAt);
|
||||
const languages = config.languages.join(',');
|
||||
const workflowLanguages = core.getInput('languages', { required: false });
|
||||
const paths = (config.originalUserInput.paths || []).join(',');
|
||||
const pathsIgnore = (config.originalUserInput['paths-ignore'] || []).join(',');
|
||||
const disableDefaultQueries = config.originalUserInput['disable-default-queries'] ? languages : '';
|
||||
const queries = (config.originalUserInput.queries || []).map(q => q.uses).join(',');
|
||||
const statusReport = {
|
||||
...statusReportBase,
|
||||
languages: languages,
|
||||
workflow_languages: workflowLanguages,
|
||||
paths: paths,
|
||||
paths_ignore: pathsIgnore,
|
||||
disable_default_queries: disableDefaultQueries,
|
||||
queries: queries,
|
||||
};
|
||||
await util.sendStatusReport(statusReport);
|
||||
}
|
||||
async function run() {
|
||||
let languages;
|
||||
const startedAt = new Date();
|
||||
let config;
|
||||
let codeql;
|
||||
try {
|
||||
if (util.should_abort('init', false) || !await util.reportActionStarting('init')) {
|
||||
util.prepareLocalRunEnvironment();
|
||||
if (!await util.sendStatusReport(await util.createStatusReportBase('init', 'starting', startedAt), true)) {
|
||||
return;
|
||||
}
|
||||
core.startGroup('Setup CodeQL tools');
|
||||
codeql = await codeql_1.setupCodeQL();
|
||||
await codeql.printVersion();
|
||||
core.endGroup();
|
||||
core.startGroup('Load language configuration');
|
||||
const config = await configUtils.loadConfig();
|
||||
languages = await util.getLanguages();
|
||||
// If the languages parameter was not given and no languages were
|
||||
// detected then fail here as this is a workflow configuration error.
|
||||
if (languages.length === 0) {
|
||||
throw new Error("Did not detect any languages to analyze. Please update input in workflow.");
|
||||
}
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config, languages);
|
||||
config = await configUtils.initConfig();
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
core.endGroup();
|
||||
}
|
||||
catch (e) {
|
||||
core.setFailed(e.message);
|
||||
await util.reportActionAborted('init', e.message);
|
||||
console.log(e);
|
||||
await util.sendStatusReport(await util.createStatusReportBase('init', 'aborted', startedAt, e.message));
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const sourceRoot = path.resolve();
|
||||
core.startGroup('Setup CodeQL tools');
|
||||
const codeql = await codeql_1.setupCodeQL();
|
||||
await codeql.printVersion();
|
||||
core.endGroup();
|
||||
// Forward Go flags
|
||||
const goFlags = process.env['GOFLAGS'];
|
||||
if (goFlags) {
|
||||
@@ -158,27 +173,22 @@ async function run() {
|
||||
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
|
||||
const codeqlRam = process.env['CODEQL_RAM'] || '6500';
|
||||
core.exportVariable('CODEQL_RAM', codeqlRam);
|
||||
const databaseFolder = path.resolve(util.getRequiredEnvParam('RUNNER_TEMP'), 'codeql_databases');
|
||||
await io.mkdirP(databaseFolder);
|
||||
let tracedLanguages = {};
|
||||
let scannedLanguages = [];
|
||||
const databaseFolder = util.getCodeQLDatabasesDir();
|
||||
fs.mkdirSync(databaseFolder, { recursive: true });
|
||||
let tracedLanguageConfigs = [];
|
||||
// TODO: replace this code once CodeQL supports multi-language tracing
|
||||
for (let language of languages) {
|
||||
for (let language of config.languages) {
|
||||
const languageDatabase = path.join(databaseFolder, language);
|
||||
// Init language database
|
||||
await codeql.databaseInit(languageDatabase, language, sourceRoot);
|
||||
// TODO: add better detection of 'traced languages' instead of using a hard coded list
|
||||
if (['cpp', 'java', 'csharp'].includes(language)) {
|
||||
if (codeql_1.isTracedLanguage(language)) {
|
||||
const config = await tracerConfig(codeql, languageDatabase);
|
||||
tracedLanguages[language] = config;
|
||||
}
|
||||
else {
|
||||
scannedLanguages.push(language);
|
||||
tracedLanguageConfigs.push(config);
|
||||
}
|
||||
}
|
||||
const tracedLanguageKeys = Object.keys(tracedLanguages);
|
||||
if (tracedLanguageKeys.length > 0) {
|
||||
const mainTracerConfig = concatTracerConfigs(tracedLanguages);
|
||||
if (tracedLanguageConfigs.length > 0) {
|
||||
const mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs);
|
||||
if (mainTracerConfig.spec) {
|
||||
for (let entry of Object.entries(mainTracerConfig.env)) {
|
||||
core.exportVariable(entry[0], entry[1]);
|
||||
@@ -198,18 +208,14 @@ async function run() {
|
||||
}
|
||||
}
|
||||
}
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES, scannedLanguages.join(','));
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES, tracedLanguageKeys.join(','));
|
||||
// TODO: make this a "private" environment variable of the action
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_DATABASE_DIR, databaseFolder);
|
||||
}
|
||||
catch (error) {
|
||||
core.setFailed(error.message);
|
||||
await util.reportActionFailed('init', error.message, error.stack);
|
||||
console.log(error);
|
||||
await util.sendStatusReport(await util.createStatusReportBase('init', 'failure', startedAt, error.message, error.stack));
|
||||
return;
|
||||
}
|
||||
await util.reportActionSucceeded('init');
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
|
||||
await sendSuccessStatusReport(startedAt, config);
|
||||
}
|
||||
run().catch(e => {
|
||||
core.setFailed("init action failed: " + e);
|
||||
|
||||
File diff suppressed because one or more lines are too long
9
lib/shared-environment.js
generated
9
lib/shared-environment.js
generated
@@ -1,17 +1,10 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.CODEQL_ACTION_DATABASE_DIR = 'CODEQL_ACTION_DATABASE_DIR';
|
||||
exports.CODEQL_ACTION_LANGUAGES = 'CODEQL_ACTION_LANGUAGES';
|
||||
exports.CODEQL_ACTION_ANALYSIS_KEY = 'CODEQL_ACTION_ANALYSIS_KEY';
|
||||
exports.ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
|
||||
exports.CODEQL_ACTION_SCANNED_LANGUAGES = 'CODEQL_ACTION_SCANNED_LANGUAGES';
|
||||
exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
|
||||
// The time at which the first action (normally init) started executing.
|
||||
// If a workflow invokes a different action without first invoking the init
|
||||
// action (i.e. the upload action is being used by a third-party integrator)
|
||||
// then this variable will be assigned the start time of the action invoked
|
||||
// rather that the init action.
|
||||
exports.CODEQL_ACTION_STARTED_AT = 'CODEQL_ACTION_STARTED_AT';
|
||||
// Populated when the init action completes successfully
|
||||
exports.CODEQL_ACTION_INIT_COMPLETED = 'CODEQL_ACTION_INIT_COMPLETED';
|
||||
exports.CODEQL_WORKFLOW_STARTED_AT = 'CODEQL_WORKFLOW_STARTED_AT';
|
||||
//# sourceMappingURL=shared-environment.js.map
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,uBAAuB,GAAG,yBAAyB,CAAC;AACpD,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,+BAA+B,GAAG,iCAAiC,CAAC;AACpE,QAAA,8BAA8B,GAAG,gCAAgC,CAAC;AAC/E,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,wBAAwB,GAAG,0BAA0B,CAAC;AACnE,wDAAwD;AAC3C,QAAA,4BAA4B,GAAG,8BAA8B,CAAC"}
|
||||
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AACvE,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,0BAA0B,GAAG,4BAA4B,CAAC"}
|
||||
41
lib/testing-utils.js
generated
41
lib/testing-utils.js
generated
@@ -1,9 +1,19 @@
|
||||
"use strict";
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const github = __importStar(require("@actions/github"));
|
||||
const sinon_1 = __importDefault(require("sinon"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const CodeQL = __importStar(require("./codeql"));
|
||||
function wrapOutput(context) {
|
||||
// Function signature taken from Socket.write.
|
||||
// Note there are two overloads:
|
||||
@@ -32,6 +42,10 @@ function wrapOutput(context) {
|
||||
function setupTests(test) {
|
||||
const typedTest = test;
|
||||
typedTest.beforeEach(t => {
|
||||
// Set an empty CodeQL object so that all method calls will fail
|
||||
// unless the test explicitly sets one up.
|
||||
CodeQL.setCodeQL({});
|
||||
// Replace stdout and stderr so we can record output during tests
|
||||
t.context.testOutput = "";
|
||||
const processStdoutWrite = process.stdout.write.bind(process.stdout);
|
||||
t.context.stdoutWrite = processStdoutWrite;
|
||||
@@ -39,17 +53,40 @@ function setupTests(test) {
|
||||
const processStderrWrite = process.stderr.write.bind(process.stderr);
|
||||
t.context.stderrWrite = processStderrWrite;
|
||||
process.stderr.write = wrapOutput(t.context);
|
||||
// Many tests modify environment variables. Take a copy now so that
|
||||
// we reset them after the test to keep tests independent of each other.
|
||||
// process.env only has strings fields, so a shallow copy is fine.
|
||||
t.context.env = {};
|
||||
Object.assign(t.context.env, process.env);
|
||||
// Any test that runs code that expects to only be run on actions
|
||||
// will depend on various environment variables.
|
||||
process.env['GITHUB_API_URL'] = 'https://github.localhost/api/v3';
|
||||
});
|
||||
typedTest.afterEach.always(t => {
|
||||
// Restore stdout and stderr
|
||||
// The captured output is only replayed if the test failed
|
||||
process.stdout.write = t.context.stdoutWrite;
|
||||
process.stderr.write = t.context.stderrWrite;
|
||||
if (!t.passed) {
|
||||
process.stdout.write(t.context.testOutput);
|
||||
}
|
||||
});
|
||||
typedTest.afterEach.always(() => {
|
||||
// Undo any modifications made by sinon
|
||||
sinon_1.default.restore();
|
||||
// Undo any modifications to the env
|
||||
process.env = t.context.env;
|
||||
});
|
||||
}
|
||||
exports.setupTests = setupTests;
|
||||
function mockGetContents(content, status) {
|
||||
// Passing an auth token is required, so we just use a dummy value
|
||||
let client = new github.GitHub('123');
|
||||
const response = {
|
||||
data: content,
|
||||
status: status
|
||||
};
|
||||
const spyGetContents = sinon_1.default.stub(client.repos, "getContents").resolves(response);
|
||||
sinon_1.default.stub(api, "getApiClient").value(() => client);
|
||||
return spyGetContents;
|
||||
}
|
||||
exports.mockGetContents = mockGetContents;
|
||||
//# sourceMappingURL=testing-utils.js.map
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;AACA,kDAA0B;AAI1B,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CAAC,KAA0B,EAAE,QAAiB,EAAE,EAA0B,EAAW,EAAE;QAC5F,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACvB,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAE1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;IACtD,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;QAC7B,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAE7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;IACH,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,GAAG,EAAE;QAC9B,eAAK,CAAC,OAAO,EAAE,CAAC;IAClB,CAAC,CAAC,CAAC;AACL,CAAC;AA3BD,gCA2BC"}
|
||||
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,wDAA0C;AAE1C,kDAA0B;AAE1B,kDAAoC;AACpC,iDAAmC;AAInC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CAAC,KAA0B,EAAE,QAAiB,EAAE,EAA0B,EAAW,EAAE;QAC5F,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACvB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;QAE1C,iEAAiE;QACjE,gDAAgD;QAChD,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,iCAAiC,CAAC;IACpE,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;QAC7B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED
,uCAAuC;QACvC,eAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AA3CD,gCA2CC;AAID,SAAgB,eAAe,CAAC,OAA4B,EAAE,MAAc;IAC1E,kEAAkE;IAClE,IAAI,MAAM,GAAG,IAAI,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;IACtC,MAAM,QAAQ,GAAG;QACf,IAAI,EAAE,OAAO;QACb,MAAM,EAAE,MAAM;KACf,CAAC;IAEF,MAAM,cAAc,GAAG,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,aAAa,CAAC,CAAC,QAAQ,CAAC,QAAe,CAAC,CAAC;IACzF,eAAK,CAAC,IAAI,CAAC,GAAG,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;IACpD,OAAO,cAAc,CAAC;AACxB,CAAC;AAXD,0CAWC"}
|
||||
175
lib/upload-lib.js
generated
175
lib/upload-lib.js
generated
@@ -43,41 +43,43 @@ function combineSarifFiles(sarifFiles) {
|
||||
exports.combineSarifFiles = combineSarifFiles;
|
||||
// Upload the given payload.
|
||||
// If the request fails then this will retry a small number of times.
|
||||
async function uploadPayload(payload) {
|
||||
core.info('Uploading results');
|
||||
async function uploadPayload(payload, repositoryNwo, githubAuth, githubApiUrl, mode, logger) {
|
||||
logger.info('Uploading results');
|
||||
// If in test mode we don't want to upload the results
|
||||
const testMode = process.env['TEST_MODE'] === 'true' || false;
|
||||
if (testMode) {
|
||||
return true;
|
||||
return;
|
||||
}
|
||||
const [owner, repo] = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
|
||||
// Make up to 4 attempts to upload, and sleep for these
|
||||
// number of seconds between each attempt.
|
||||
// We don't want to backoff too much to avoid wasting action
|
||||
// minutes, but just waiting a little bit could maybe help.
|
||||
const backoffPeriods = [1, 5, 15];
|
||||
const client = api.getApiClient(githubAuth, githubApiUrl);
|
||||
for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
|
||||
const response = await api.getApiClient().request("PUT /repos/:owner/:repo/code-scanning/analysis", ({
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
const reqURL = mode === 'actions'
|
||||
? 'PUT /repos/:owner/:repo/code-scanning/analysis'
|
||||
: 'POST /repos/:owner/:repo/code-scanning/sarifs';
|
||||
const response = await client.request(reqURL, ({
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
data: payload,
|
||||
}));
|
||||
core.debug('response status: ' + response.status);
|
||||
logger.debug('response status: ' + response.status);
|
||||
const statusCode = response.status;
|
||||
if (statusCode === 202) {
|
||||
core.info("Successfully uploaded results");
|
||||
return true;
|
||||
logger.info("Successfully uploaded results");
|
||||
return;
|
||||
}
|
||||
const requestID = response.headers["x-github-request-id"];
|
||||
// On any other status code that's not 5xx mark the upload as failed
|
||||
if (!statusCode || statusCode < 500 || statusCode >= 600) {
|
||||
core.setFailed('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
|
||||
return false;
|
||||
throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
|
||||
}
|
||||
// On a 5xx status code we may retry the request
|
||||
if (attempt < backoffPeriods.length) {
|
||||
// Log the failure as a warning but don't mark the action as failed yet
|
||||
core.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
|
||||
logger.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
|
||||
') failed (' + requestID + '). Retrying in ' + backoffPeriods[attempt] +
|
||||
' seconds: (' + statusCode + ') ' + JSON.stringify(response.data));
|
||||
// Sleep for the backoff period
|
||||
@@ -88,29 +90,34 @@ async function uploadPayload(payload) {
|
||||
// If the upload fails with 5xx then we assume it is a temporary problem
|
||||
// and not an error that the user has caused or can fix.
|
||||
// We avoid marking the job as failed to avoid breaking CI workflows.
|
||||
core.error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
|
||||
return false;
|
||||
throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
|
||||
}
|
||||
}
|
||||
return false;
|
||||
// This case shouldn't ever happen as the final iteration of the loop
|
||||
// will always throw an error instead of exiting to here.
|
||||
throw new Error('Upload failed');
|
||||
}
|
||||
// Uploads a single sarif file or a directory of sarif files
|
||||
// depending on what the path happens to refer to.
|
||||
// Returns true iff the upload occurred and succeeded
|
||||
async function upload(input) {
|
||||
if (fs.lstatSync(input).isDirectory()) {
|
||||
const sarifFiles = fs.readdirSync(input)
|
||||
async function upload(sarifPath, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubApiUrl, mode, logger) {
|
||||
const sarifFiles = [];
|
||||
if (!fs.existsSync(sarifPath)) {
|
||||
throw new Error(`Path does not exist: ${sarifPath}`);
|
||||
}
|
||||
if (fs.lstatSync(sarifPath).isDirectory()) {
|
||||
fs.readdirSync(sarifPath)
|
||||
.filter(f => f.endsWith(".sarif"))
|
||||
.map(f => path.resolve(input, f));
|
||||
.map(f => path.resolve(sarifPath, f))
|
||||
.forEach(f => sarifFiles.push(f));
|
||||
if (sarifFiles.length === 0) {
|
||||
core.setFailed("No SARIF files found to upload in \"" + input + "\".");
|
||||
return false;
|
||||
throw new Error("No SARIF files found to upload in \"" + sarifPath + "\".");
|
||||
}
|
||||
return await uploadFiles(sarifFiles);
|
||||
}
|
||||
else {
|
||||
return await uploadFiles([input]);
|
||||
sarifFiles.push(sarifPath);
|
||||
}
|
||||
return await uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubApiUrl, mode, logger);
|
||||
}
|
||||
exports.upload = upload;
|
||||
// Counts the number of results in the given SARIF file
|
||||
@@ -123,87 +130,83 @@ function countResultsInSarif(sarif) {
|
||||
}
|
||||
exports.countResultsInSarif = countResultsInSarif;
|
||||
// Validates that the given file path refers to a valid SARIF file.
|
||||
// Returns a non-empty list of error message if the file is invalid,
|
||||
// otherwise returns the empty list if the file is valid.
|
||||
function validateSarifFileSchema(sarifFilePath) {
|
||||
// Throws an error if the file is invalid.
|
||||
function validateSarifFileSchema(sarifFilePath, logger) {
|
||||
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
|
||||
const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));
|
||||
const schema = require('../src/sarif_v2.1.0_schema.json');
|
||||
const result = new jsonschema.Validator().validate(sarif, schema);
|
||||
if (result.valid) {
|
||||
return true;
|
||||
}
|
||||
else {
|
||||
// Set the failure message to the stacks of all the errors.
|
||||
// This should be of a manageable size and may even give enough to fix the error.
|
||||
const errorMessages = result.errors.map(e => "- " + e.stack);
|
||||
core.setFailed("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + errorMessages.join("\n"));
|
||||
// Also output the more verbose error messages in groups as these may be very large.
|
||||
if (!result.valid) {
|
||||
// Output the more verbose error messages in groups as these may be very large.
|
||||
for (const error of result.errors) {
|
||||
core.startGroup("Error details: " + error.stack);
|
||||
core.info(JSON.stringify(error, null, 2));
|
||||
core.endGroup();
|
||||
logger.startGroup("Error details: " + error.stack);
|
||||
logger.info(JSON.stringify(error, null, 2));
|
||||
logger.endGroup();
|
||||
}
|
||||
return false;
|
||||
// Set the main error message to the stacks of all the errors.
|
||||
// This should be of a manageable size and may even give enough to fix the error.
|
||||
const sarifErrors = result.errors.map(e => "- " + e.stack);
|
||||
throw new Error("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + sarifErrors.join("\n"));
|
||||
}
|
||||
}
|
||||
exports.validateSarifFileSchema = validateSarifFileSchema;
|
||||
// Uploads the given set of sarif files.
|
||||
// Returns true iff the upload occurred and succeeded
|
||||
async function uploadFiles(sarifFiles) {
|
||||
core.startGroup("Uploading results");
|
||||
core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
|
||||
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
|
||||
if (process.env[sentinelEnvVar]) {
|
||||
core.error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
|
||||
return false;
|
||||
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubApiUrl, mode, logger) {
|
||||
logger.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
|
||||
if (mode === 'actions') {
|
||||
// This check only works on actions as env vars don't persist between calls to the CLI
|
||||
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
|
||||
if (process.env[sentinelEnvVar]) {
|
||||
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
|
||||
}
|
||||
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
|
||||
}
|
||||
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
|
||||
// Validate that the files we were asked to upload are all valid SARIF files
|
||||
for (const file of sarifFiles) {
|
||||
if (!validateSarifFileSchema(file)) {
|
||||
return false;
|
||||
}
|
||||
validateSarifFileSchema(file, logger);
|
||||
}
|
||||
const commitOid = await util.getCommitOid();
|
||||
const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
|
||||
const ref = util.getRef();
|
||||
const analysisKey = await util.getAnalysisKey();
|
||||
const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
|
||||
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
|
||||
let sarifPayload = combineSarifFiles(sarifFiles);
|
||||
sarifPayload = fingerprints.addFingerprints(sarifPayload);
|
||||
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
|
||||
let checkoutPath = core.getInput('checkout_path');
|
||||
let checkoutURI = file_url_1.default(checkoutPath);
|
||||
const workflowRunID = parseInt(workflowRunIDStr, 10);
|
||||
if (Number.isNaN(workflowRunID)) {
|
||||
core.setFailed('GITHUB_RUN_ID must define a non NaN workflow run ID');
|
||||
return false;
|
||||
}
|
||||
let matrix = core.getInput('matrix');
|
||||
if (matrix === "null" || matrix === "") {
|
||||
matrix = undefined;
|
||||
}
|
||||
const toolNames = util.getToolNames(sarifPayload);
|
||||
const payload = JSON.stringify({
|
||||
"commit_oid": commitOid,
|
||||
"ref": ref,
|
||||
"analysis_key": analysisKey,
|
||||
"analysis_name": analysisName,
|
||||
"sarif": zipped_sarif,
|
||||
"workflow_run_id": workflowRunID,
|
||||
"checkout_uri": checkoutURI,
|
||||
"environment": matrix,
|
||||
"started_at": startedAt,
|
||||
"tool_names": toolNames,
|
||||
});
|
||||
let payload;
|
||||
if (mode === 'actions') {
|
||||
payload = JSON.stringify({
|
||||
"commit_oid": commitOid,
|
||||
"ref": ref,
|
||||
"analysis_key": analysisKey,
|
||||
"analysis_name": analysisName,
|
||||
"sarif": zipped_sarif,
|
||||
"workflow_run_id": workflowRunID,
|
||||
"checkout_uri": checkoutURI,
|
||||
"environment": environment,
|
||||
"started_at": process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT],
|
||||
"tool_names": toolNames,
|
||||
});
|
||||
}
|
||||
else {
|
||||
payload = JSON.stringify({
|
||||
"commit_sha": commitOid,
|
||||
"ref": ref,
|
||||
"sarif": zipped_sarif,
|
||||
"checkout_uri": checkoutURI,
|
||||
"tool_name": toolNames[0],
|
||||
});
|
||||
}
|
||||
// Log some useful debug info about the info
|
||||
core.debug("Raw upload size: " + sarifPayload.length + " bytes");
|
||||
core.debug("Base64 zipped upload size: " + zipped_sarif.length + " bytes");
|
||||
core.debug("Number of results in upload: " + countResultsInSarif(sarifPayload));
|
||||
const rawUploadSizeBytes = sarifPayload.length;
|
||||
logger.debug("Raw upload size: " + rawUploadSizeBytes + " bytes");
|
||||
const zippedUploadSizeBytes = zipped_sarif.length;
|
||||
logger.debug("Base64 zipped upload size: " + zippedUploadSizeBytes + " bytes");
|
||||
const numResultInSarif = countResultsInSarif(sarifPayload);
|
||||
logger.debug("Number of results in upload: " + numResultInSarif);
|
||||
// Make the upload
|
||||
const succeeded = await uploadPayload(payload);
|
||||
core.endGroup();
|
||||
return succeeded;
|
||||
await uploadPayload(payload, repositoryNwo, githubAuth, githubApiUrl, mode, logger);
|
||||
return {
|
||||
raw_upload_size_bytes: rawUploadSizeBytes,
|
||||
zipped_upload_size_bytes: zippedUploadSizeBytes,
|
||||
num_results_in_sarif: numResultInSarif,
|
||||
};
|
||||
}
|
||||
//# sourceMappingURL=upload-lib.js.map
|
||||
File diff suppressed because one or more lines are too long
7
lib/upload-lib.test.js
generated
7
lib/upload-lib.test.js
generated
@@ -11,17 +11,16 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const logging_1 = require("./logging");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const uploadLib = __importStar(require("./upload-lib"));
|
||||
testing_utils_1.setupTests(ava_1.default);
|
||||
ava_1.default('validateSarifFileSchema - valid', t => {
|
||||
const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
|
||||
t.true(uploadLib.validateSarifFileSchema(inputFile));
|
||||
t.notThrows(() => uploadLib.validateSarifFileSchema(inputFile, logging_1.getCLILogger()));
|
||||
});
|
||||
ava_1.default('validateSarifFileSchema - invalid', t => {
|
||||
const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
|
||||
t.false(uploadLib.validateSarifFileSchema(inputFile));
|
||||
// validateSarifFileSchema calls core.setFailed which sets the exit code on error
|
||||
process.exitCode = 0;
|
||||
t.throws(() => uploadLib.validateSarifFileSchema(inputFile, logging_1.getCLILogger()));
|
||||
});
|
||||
//# sourceMappingURL=upload-lib.test.js.map
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,mDAA2C;AAC3C,wDAA0C;AAE1C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAC1C,MAAM,SAAS,GAAG,SAAS,GAAG,oCAAoC,CAAC;IACnE,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE;IAC5C,MAAM,SAAS,GAAG,SAAS,GAAG,sCAAsC,CAAC;IACrE,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;IACtD,iFAAiF;IACjF,OAAO,CAAC,QAAQ,GAAG,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,uCAAyC;AACzC,mDAA2C;AAC3C,wDAA0C;AAE1C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAC1C,MAAM,SAAS,GAAG,SAAS,GAAG,oCAAoC,CAAC;IACnE,CAAC,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,sBAAY,EAAE,CAAC,CAAC,CAAC;AAClF,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE;IAC5C,MAAM,SAAS,GAAG,SAAS,GAAG,sCAAsC,CAAC;IACrE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,sBAAY,EAAE,CAAC,CAAC,CAAC;AAC/E,CAAC,CAAC,CAAC"}
|
||||
24
lib/upload-sarif.js
generated
24
lib/upload-sarif.js
generated
@@ -8,23 +8,31 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const logging_1 = require("./logging");
|
||||
const repository_1 = require("./repository");
|
||||
const upload_lib = __importStar(require("./upload-lib"));
|
||||
const util = __importStar(require("./util"));
|
||||
async function sendSuccessStatusReport(startedAt, uploadStats) {
|
||||
const statusReportBase = await util.createStatusReportBase('upload-sarif', 'success', startedAt);
|
||||
const statusReport = {
|
||||
...statusReportBase,
|
||||
...uploadStats,
|
||||
};
|
||||
await util.sendStatusReport(statusReport);
|
||||
}
|
||||
async function run() {
|
||||
if (util.should_abort('upload-sarif', false) || !await util.reportActionStarting('upload-sarif')) {
|
||||
const startedAt = new Date();
|
||||
if (!await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'starting', startedAt), true)) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
if (await upload_lib.upload(core.getInput('sarif_file'))) {
|
||||
await util.reportActionSucceeded('upload-sarif');
|
||||
}
|
||||
else {
|
||||
await util.reportActionFailed('upload-sarif', 'upload');
|
||||
}
|
||||
const uploadStats = await upload_lib.upload(core.getInput('sarif_file'), repository_1.parseRepositoryNwo(util.getRequiredEnvParam('GITHUB_REPOSITORY')), await util.getCommitOid(), util.getRef(), await util.getAnalysisKey(), util.getRequiredEnvParam('GITHUB_WORKFLOW'), util.getWorkflowRunID(), core.getInput('checkout_path'), core.getInput('matrix'), core.getInput('token'), util.getRequiredEnvParam('GITHUB_API_URL'), 'actions', logging_1.getActionsLogger());
|
||||
await sendSuccessStatusReport(startedAt, uploadStats);
|
||||
}
|
||||
catch (error) {
|
||||
core.setFailed(error.message);
|
||||
await util.reportActionFailed('upload-sarif', error.message, error.stack);
|
||||
console.log(error);
|
||||
await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'failure', startedAt, error.message, error.stack));
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"upload-sarif.js","sourceRoot":"","sources":["../src/upload-sarif.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,yDAA2C;AAC3C,6CAA+B;AAE/B,KAAK,UAAU,GAAG;IAChB,IAAI,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,KAAK,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,cAAc,CAAC,EAAE;QAChG,OAAO;KACR;IAED,IAAI;QACF,IAAI,MAAM,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE;YACxD,MAAM,IAAI,CAAC,qBAAqB,CAAC,cAAc,CAAC,CAAC;SAClD;aAAM;YACL,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;SACzD;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QAC1E,OAAO;KACR;AACH,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,qCAAqC,GAAG,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"upload-sarif.js","sourceRoot":"","sources":["../src/upload-sarif.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAI/B,KAAK,UAAU,uBAAuB,CAAC,SAAe,EAAE,WAA0C;IAChG,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,cAAc,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;IACjG,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAI,WAAW;KAChB,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,cAAc,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;QAChH,OAAO;KACR;IAED,IAAI;QACF,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CACzC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,EAC3B,+BAAkB,CAAC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACjE,MAAM,IAAI,CAAC,YAAY,EAAE,EACzB,IAAI,CAAC,MAAM,EAAE,EACb,MAAM,IAAI,CAAC,cAAc,EAAE,EAC3B,IAAI,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAC3C,IAAI,CAAC,gBAAgB,EAAE,EACvB,IAAI,CAAC,QAAQ,CAAC,eAAe,CAAC,EAC9B,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EACvB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,mBAAmB,CAAC,gBAAgB,CAAC,EAC1C,SAAS,EACT,0BAAgB,EAAE,CAAC,CAAC;QACtB,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KAEvD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAC3D,cAAc,EACd,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;QAChB,OAAO;KACR;AACH,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,qCAAqC,GAAG,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
|
||||
310
lib/util.js
generated
310
lib/util.js
generated
@@ -15,27 +15,24 @@ const path = __importStar(require("path"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
/**
|
||||
* Should the current action be aborted?
|
||||
*
|
||||
* This method should be called at the start of all CodeQL actions and they
|
||||
* should abort cleanly if this returns true without failing the action.
|
||||
* This method will call `core.setFailed` if necessary.
|
||||
* The API URL for github.com.
|
||||
*/
|
||||
function should_abort(actionName, requireInitActionHasRun) {
|
||||
// Check that required aspects of the environment are present
|
||||
const ref = process.env['GITHUB_REF'];
|
||||
if (ref === undefined) {
|
||||
core.setFailed('GITHUB_REF must be set.');
|
||||
return true;
|
||||
}
|
||||
// If the init action is required, then check the it completed successfully.
|
||||
if (requireInitActionHasRun && process.env[sharedEnv.CODEQL_ACTION_INIT_COMPLETED] === undefined) {
|
||||
core.setFailed('The CodeQL ' + actionName + ' action cannot be used unless the CodeQL init action is run first. Aborting.');
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
exports.GITHUB_DOTCOM_API_URL = "https://api.github.com";
|
||||
/**
|
||||
* Get the API URL for the GitHub instance we are connected to.
|
||||
* May be for github.com or for an enterprise instance.
|
||||
*/
|
||||
function getInstanceAPIURL() {
|
||||
return process.env["GITHUB_API_URL"] || exports.GITHUB_DOTCOM_API_URL;
|
||||
}
|
||||
exports.should_abort = should_abort;
|
||||
exports.getInstanceAPIURL = getInstanceAPIURL;
|
||||
/**
|
||||
* Are we running against a GitHub Enterpise instance, as opposed to github.com.
|
||||
*/
|
||||
function isEnterprise() {
|
||||
return getInstanceAPIURL() !== exports.GITHUB_DOTCOM_API_URL;
|
||||
}
|
||||
exports.isEnterprise = isEnterprise;
|
||||
/**
|
||||
* Get an environment parameter, but throw an error if it is not set.
|
||||
*/
|
||||
@@ -49,82 +46,43 @@ function getRequiredEnvParam(paramName) {
|
||||
}
|
||||
exports.getRequiredEnvParam = getRequiredEnvParam;
|
||||
/**
|
||||
* Gets the set of languages in the current repository
|
||||
* Get the extra options for the codeql commands.
|
||||
*/
|
||||
async function getLanguagesInRepo() {
|
||||
var _a;
|
||||
// Translate between GitHub's API names for languages and ours
|
||||
const codeqlLanguages = {
|
||||
'C': 'cpp',
|
||||
'C++': 'cpp',
|
||||
'C#': 'csharp',
|
||||
'Go': 'go',
|
||||
'Java': 'java',
|
||||
'JavaScript': 'javascript',
|
||||
'TypeScript': 'javascript',
|
||||
'Python': 'python',
|
||||
};
|
||||
let repo_nwo = (_a = process.env['GITHUB_REPOSITORY']) === null || _a === void 0 ? void 0 : _a.split("/");
|
||||
if (repo_nwo) {
|
||||
let owner = repo_nwo[0];
|
||||
let repo = repo_nwo[1];
|
||||
core.debug(`GitHub repo ${owner} ${repo}`);
|
||||
const response = await api.getApiClient().request("GET /repos/:owner/:repo/languages", ({
|
||||
owner,
|
||||
repo
|
||||
}));
|
||||
core.debug("Languages API response: " + JSON.stringify(response));
|
||||
// The GitHub API is going to return languages in order of popularity,
|
||||
// When we pick a language to autobuild we want to pick the most popular traced language
|
||||
// Since sets in javascript maintain insertion order, using a set here and then splatting it
|
||||
// into an array gives us an array of languages ordered by popularity
|
||||
let languages = new Set();
|
||||
for (let lang in response.data) {
|
||||
if (lang in codeqlLanguages) {
|
||||
languages.add(codeqlLanguages[lang]);
|
||||
}
|
||||
}
|
||||
return [...languages];
|
||||
function getExtraOptionsEnvParam() {
|
||||
const varName = 'CODEQL_ACTION_EXTRA_OPTIONS';
|
||||
const raw = process.env[varName];
|
||||
if (raw === undefined || raw.length === 0) {
|
||||
return {};
|
||||
}
|
||||
else {
|
||||
return [];
|
||||
try {
|
||||
return JSON.parse(raw);
|
||||
}
|
||||
catch (e) {
|
||||
throw new Error(varName +
|
||||
' environment variable is set, but does not contain valid JSON: ' +
|
||||
e.message);
|
||||
}
|
||||
}
|
||||
exports.getExtraOptionsEnvParam = getExtraOptionsEnvParam;
|
||||
function isLocalRun() {
|
||||
return !!process.env.CODEQL_LOCAL_RUN
|
||||
&& process.env.CODEQL_LOCAL_RUN !== 'false'
|
||||
&& process.env.CODEQL_LOCAL_RUN !== '0';
|
||||
}
|
||||
exports.isLocalRun = isLocalRun;
|
||||
/**
|
||||
* Get the languages to analyse.
|
||||
*
|
||||
* The result is obtained from the environment parameter CODEQL_ACTION_LANGUAGES
|
||||
* if that has been set, otherwise it is obtained from the action input parameter
|
||||
* 'languages' if that has been set, otherwise it is deduced as all languages in the
|
||||
* repo that can be analysed.
|
||||
*
|
||||
* If the languages are obtained from either of the second choices, the
|
||||
* CODEQL_ACTION_LANGUAGES environment variable will be exported with the
|
||||
* deduced list.
|
||||
* Ensures all required environment variables are set in the context of a local run.
|
||||
*/
|
||||
async function getLanguages() {
|
||||
// Obtain from CODEQL_ACTION_LANGUAGES if set
|
||||
const langsVar = process.env[sharedEnv.CODEQL_ACTION_LANGUAGES];
|
||||
if (langsVar) {
|
||||
return langsVar.split(',')
|
||||
.map(x => x.trim())
|
||||
.filter(x => x.length > 0);
|
||||
function prepareLocalRunEnvironment() {
|
||||
if (!isLocalRun()) {
|
||||
return;
|
||||
}
|
||||
// Obtain from action input 'languages' if set
|
||||
let languages = core.getInput('languages', { required: false })
|
||||
.split(',')
|
||||
.map(x => x.trim())
|
||||
.filter(x => x.length > 0);
|
||||
core.info("Languages from configuration: " + JSON.stringify(languages));
|
||||
if (languages.length === 0) {
|
||||
// Obtain languages as all languages in the repo that can be analysed
|
||||
languages = await getLanguagesInRepo();
|
||||
core.info("Automatically detected languages: " + JSON.stringify(languages));
|
||||
core.debug('Action is running locally.');
|
||||
if (!process.env.GITHUB_JOB) {
|
||||
core.exportVariable('GITHUB_JOB', 'UNKNOWN-JOB');
|
||||
}
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_LANGUAGES, languages.join(','));
|
||||
return languages;
|
||||
}
|
||||
exports.getLanguages = getLanguages;
|
||||
exports.prepareLocalRunEnvironment = prepareLocalRunEnvironment;
|
||||
/**
|
||||
* Gets the SHA of the commit that is currently checked out.
|
||||
*/
|
||||
@@ -161,7 +119,7 @@ async function getWorkflowPath() {
|
||||
const owner = repo_nwo[0];
|
||||
const repo = repo_nwo[1];
|
||||
const run_id = Number(getRequiredEnvParam('GITHUB_RUN_ID'));
|
||||
const apiClient = api.getApiClient();
|
||||
const apiClient = api.getActionsApiClient();
|
||||
const runsResponse = await apiClient.request('GET /repos/:owner/:repo/actions/runs/:run_id', {
|
||||
owner,
|
||||
repo,
|
||||
@@ -171,6 +129,17 @@ async function getWorkflowPath() {
|
||||
const workflowResponse = await apiClient.request('GET ' + workflowUrl);
|
||||
return workflowResponse.data.path;
|
||||
}
|
||||
/**
|
||||
* Get the workflow run ID.
|
||||
*/
|
||||
function getWorkflowRunID() {
|
||||
const workflowRunID = parseInt(getRequiredEnvParam('GITHUB_RUN_ID'), 10);
|
||||
if (Number.isNaN(workflowRunID)) {
|
||||
throw new Error('GITHUB_RUN_ID must define a non NaN workflow run ID');
|
||||
}
|
||||
return workflowRunID;
|
||||
}
|
||||
exports.getWorkflowRunID = getWorkflowRunID;
|
||||
/**
|
||||
* Get the analysis key paramter for the current job.
|
||||
*
|
||||
@@ -179,14 +148,15 @@ async function getWorkflowPath() {
|
||||
* the github API, but after that the result will be cached.
|
||||
*/
|
||||
async function getAnalysisKey() {
|
||||
let analysisKey = process.env[sharedEnv.CODEQL_ACTION_ANALYSIS_KEY];
|
||||
const analysisKeyEnvVar = 'CODEQL_ACTION_ANALYSIS_KEY';
|
||||
let analysisKey = process.env[analysisKeyEnvVar];
|
||||
if (analysisKey !== undefined) {
|
||||
return analysisKey;
|
||||
}
|
||||
const workflowPath = await getWorkflowPath();
|
||||
const jobName = getRequiredEnvParam('GITHUB_JOB');
|
||||
analysisKey = workflowPath + ':' + jobName;
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_ANALYSIS_KEY, analysisKey);
|
||||
core.exportVariable(analysisKeyEnvVar, analysisKey);
|
||||
return analysisKey;
|
||||
}
|
||||
exports.getAnalysisKey = getAnalysisKey;
|
||||
@@ -215,10 +185,11 @@ exports.getRef = getRef;
|
||||
*
|
||||
* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
|
||||
* @param status The status. Must be 'success', 'failure', or 'starting'
|
||||
* @param startedAt The time this action started executing.
|
||||
* @param cause Cause of failure (only supply if status is 'failure')
|
||||
* @param exception Exception (only supply if status is 'failure')
|
||||
*/
|
||||
async function createStatusReport(actionName, status, cause, exception) {
|
||||
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
|
||||
const commitOid = process.env['GITHUB_SHA'] || '';
|
||||
const ref = getRef();
|
||||
const workflowRunIDStr = process.env['GITHUB_RUN_ID'];
|
||||
@@ -229,20 +200,22 @@ async function createStatusReport(actionName, status, cause, exception) {
|
||||
const workflowName = process.env['GITHUB_WORKFLOW'] || '';
|
||||
const jobName = process.env['GITHUB_JOB'] || '';
|
||||
const analysis_key = await getAnalysisKey();
|
||||
const languages = (await getLanguages()).sort().join(',');
|
||||
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT] || new Date().toISOString();
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_STARTED_AT, startedAt);
|
||||
let workflowStartedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
|
||||
if (workflowStartedAt === undefined) {
|
||||
workflowStartedAt = actionStartedAt.toISOString();
|
||||
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
|
||||
}
|
||||
let statusReport = {
|
||||
workflow_run_id: workflowRunID,
|
||||
workflow_name: workflowName,
|
||||
job_name: jobName,
|
||||
analysis_key: analysis_key,
|
||||
languages: languages,
|
||||
commit_oid: commitOid,
|
||||
ref: ref,
|
||||
action_name: actionName,
|
||||
action_oid: "unknown",
|
||||
started_at: startedAt,
|
||||
started_at: workflowStartedAt,
|
||||
action_started_at: actionStartedAt.toISOString(),
|
||||
status: status
|
||||
};
|
||||
// Add optional parameters
|
||||
@@ -261,80 +234,54 @@ async function createStatusReport(actionName, status, cause, exception) {
|
||||
}
|
||||
return statusReport;
|
||||
}
|
||||
exports.createStatusReportBase = createStatusReportBase;
|
||||
/**
|
||||
* Send a status report to the code_scanning/analysis/status endpoint.
|
||||
*
|
||||
* Returns the status code of the response to the status request.
|
||||
* Optionally checks the response from the API endpoint and sets the action
|
||||
* as failed if the status report failed. This is only expected to be used
|
||||
* when sending a 'starting' report.
|
||||
*
|
||||
* Returns whether sending the status report was successful of not.
|
||||
*/
|
||||
async function sendStatusReport(statusReport) {
|
||||
async function sendStatusReport(statusReport, ignoreFailures) {
|
||||
if (isEnterprise()) {
|
||||
core.debug("Not sending status report to GitHub Enterprise");
|
||||
return true;
|
||||
}
|
||||
if (isLocalRun()) {
|
||||
core.debug("Not sending status report because this is a local run");
|
||||
return true;
|
||||
}
|
||||
const statusReportJSON = JSON.stringify(statusReport);
|
||||
core.debug('Sending status report: ' + statusReportJSON);
|
||||
const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
|
||||
const [owner, repo] = nwo.split("/");
|
||||
const statusResponse = await api.getApiClient().request('PUT /repos/:owner/:repo/code-scanning/analysis/status', {
|
||||
const client = api.getActionsApiClient();
|
||||
const statusResponse = await client.request('PUT /repos/:owner/:repo/code-scanning/analysis/status', {
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
data: statusReportJSON,
|
||||
});
|
||||
return statusResponse.status;
|
||||
}
|
||||
/**
|
||||
* Send a status report that an action is starting.
|
||||
*
|
||||
* If the action is `init` then this also records the start time in the environment,
|
||||
* and ensures that the analysed languages are also recorded in the envirenment.
|
||||
*
|
||||
* Returns true unless a problem occurred and the action should abort.
|
||||
*/
|
||||
async function reportActionStarting(action) {
|
||||
const statusCode = await sendStatusReport(await createStatusReport(action, 'starting'));
|
||||
// If the status report request fails with a 403 or a 404, then this is a deliberate
|
||||
// message from the endpoint that the SARIF upload can be expected to fail too,
|
||||
// so the action should fail to avoid wasting actions minutes.
|
||||
//
|
||||
// Other failure responses (or lack thereof) could be transitory and should not
|
||||
// cause the action to fail.
|
||||
if (statusCode === 403) {
|
||||
core.setFailed('The repo on which this action is running is not opted-in to CodeQL code scanning.');
|
||||
return false;
|
||||
}
|
||||
if (statusCode === 404) {
|
||||
core.setFailed('Not authorized to used the CodeQL code scanning feature on this repo.');
|
||||
return false;
|
||||
if (!ignoreFailures) {
|
||||
// If the status report request fails with a 403 or a 404, then this is a deliberate
|
||||
// message from the endpoint that the SARIF upload can be expected to fail too,
|
||||
// so the action should fail to avoid wasting actions minutes.
|
||||
//
|
||||
// Other failure responses (or lack thereof) could be transitory and should not
|
||||
// cause the action to fail.
|
||||
if (statusResponse.status === 403) {
|
||||
core.setFailed('The repo on which this action is running is not opted-in to CodeQL code scanning.');
|
||||
return false;
|
||||
}
|
||||
if (statusResponse.status === 404) {
|
||||
core.setFailed('Not authorized to used the CodeQL code scanning feature on this repo.');
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
exports.reportActionStarting = reportActionStarting;
|
||||
/**
|
||||
* Report that an action has failed.
|
||||
*
|
||||
* Note that the started_at date is always that of the `init` action, since
|
||||
* this is likely to give a more useful duration when inspecting events.
|
||||
*/
|
||||
async function reportActionFailed(action, cause, exception) {
|
||||
await sendStatusReport(await createStatusReport(action, 'failure', cause, exception));
|
||||
}
|
||||
exports.reportActionFailed = reportActionFailed;
|
||||
/**
|
||||
* Report that an action has succeeded.
|
||||
*
|
||||
* Note that the started_at date is always that of the `init` action, since
|
||||
* this is likely to give a more useful duration when inspecting events.
|
||||
*/
|
||||
async function reportActionSucceeded(action) {
|
||||
await sendStatusReport(await createStatusReport(action, 'success'));
|
||||
}
|
||||
exports.reportActionSucceeded = reportActionSucceeded;
|
||||
/**
|
||||
* Report that an action has been aborted.
|
||||
*
|
||||
* Note that the started_at date is always that of the `init` action, since
|
||||
* this is likely to give a more useful duration when inspecting events.
|
||||
*/
|
||||
async function reportActionAborted(action, cause) {
|
||||
await sendStatusReport(await createStatusReport(action, 'aborted', cause));
|
||||
}
|
||||
exports.reportActionAborted = reportActionAborted;
|
||||
exports.sendStatusReport = sendStatusReport;
|
||||
/**
|
||||
* Get the array of all the tool names contained in the given sarif contents.
|
||||
*
|
||||
@@ -391,29 +338,76 @@ function getMemoryFlag() {
|
||||
}
|
||||
exports.getMemoryFlag = getMemoryFlag;
|
||||
/**
|
||||
* Get the codeql `--threads` value specified for the `threads` input. The value
|
||||
* defaults to 1. The value will be capped to the number of available CPUs.
|
||||
* Get the codeql `--threads` value specified for the `threads` input.
|
||||
* If not value was specified, all available threads will be used.
|
||||
*
|
||||
* The value will be capped to the number of available CPUs.
|
||||
*
|
||||
* @returns string
|
||||
*/
|
||||
function getThreadsFlag() {
|
||||
let numThreads = 1;
|
||||
let numThreads;
|
||||
const numThreadsString = core.getInput("threads");
|
||||
const maxThreads = os.cpus().length;
|
||||
if (numThreadsString) {
|
||||
numThreads = Number(numThreadsString);
|
||||
if (Number.isNaN(numThreads)) {
|
||||
throw new Error(`Invalid threads setting "${numThreadsString}", specified.`);
|
||||
}
|
||||
const maxThreads = os.cpus().length;
|
||||
if (numThreads > maxThreads) {
|
||||
core.info(`Clamping desired number of threads (${numThreads}) to max available (${maxThreads}).`);
|
||||
numThreads = maxThreads;
|
||||
}
|
||||
const minThreads = -maxThreads;
|
||||
if (numThreads < minThreads) {
|
||||
core.info(`Clamping desired number of free threads (${numThreads}) to max available (${minThreads}).`);
|
||||
numThreads = minThreads;
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Default to using all threads
|
||||
numThreads = maxThreads;
|
||||
}
|
||||
return `--threads=${numThreads}`;
|
||||
}
|
||||
exports.getThreadsFlag = getThreadsFlag;
|
||||
/**
|
||||
* Get the directory where CodeQL databases should be placed.
|
||||
*/
|
||||
function getCodeQLDatabasesDir() {
|
||||
return path.resolve(getRequiredEnvParam('RUNNER_TEMP'), 'codeql_databases');
|
||||
}
|
||||
exports.getCodeQLDatabasesDir = getCodeQLDatabasesDir;
|
||||
function fileDownloadError(file) {
|
||||
return 'Error while trying to download `' + file + '`';
|
||||
}
|
||||
exports.fileDownloadError = fileDownloadError;
|
||||
function fileIsADirectoryError(file) {
|
||||
return '`' + file + '` is a directory';
|
||||
}
|
||||
exports.fileIsADirectoryError = fileIsADirectoryError;
|
||||
async function getFileContentsUsingAPI(owner, repo, path, ref) {
|
||||
const response = await api.getActionsApiClient(true).repos.getContents({
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
path: path,
|
||||
ref: ref,
|
||||
});
|
||||
const file = [owner, repo, path].join('/') + '@' + ref;
|
||||
if (response.status !== 200) {
|
||||
throw new Error(fileDownloadError(file));
|
||||
}
|
||||
if (Array.isArray(response.data)) {
|
||||
throw new Error(fileIsADirectoryError(file));
|
||||
}
|
||||
let fileContents;
|
||||
if ("content" in response.data && response.data.content !== undefined) {
|
||||
fileContents = response.data.content;
|
||||
}
|
||||
else {
|
||||
throw new Error(fileDownloadError(file));
|
||||
}
|
||||
return Buffer.from(fileContents, 'base64').toString('binary');
|
||||
}
|
||||
exports.getFileContentsUsingAPI = getFileContentsUsingAPI;
|
||||
//# sourceMappingURL=util.js.map
|
||||
File diff suppressed because one or more lines are too long
89
lib/util.test.js
generated
89
lib/util.test.js
generated
@@ -13,9 +13,9 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const os = __importStar(require("os"));
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const testingUtils = __importStar(require("./testing-utils"));
|
||||
const util = __importStar(require("./util"));
|
||||
testing_utils_1.setupTests(ava_1.default);
|
||||
testingUtils.setupTests(ava_1.default);
|
||||
ava_1.default('getToolNames', t => {
|
||||
const input = fs.readFileSync(__dirname + '/../src/testdata/tool-names.sarif', 'utf8');
|
||||
const toolNames = util.getToolNames(input);
|
||||
@@ -61,4 +61,89 @@ ava_1.default('getRef() throws on the empty string', t => {
|
||||
process.env["GITHUB_REF"] = "";
|
||||
t.throws(util.getRef);
|
||||
});
|
||||
ava_1.default('isLocalRun() runs correctly', t => {
|
||||
const origLocalRun = process.env.CODEQL_LOCAL_RUN;
|
||||
process.env.CODEQL_LOCAL_RUN = '';
|
||||
t.assert(!util.isLocalRun());
|
||||
process.env.CODEQL_LOCAL_RUN = 'false';
|
||||
t.assert(!util.isLocalRun());
|
||||
process.env.CODEQL_LOCAL_RUN = '0';
|
||||
t.assert(!util.isLocalRun());
|
||||
process.env.CODEQL_LOCAL_RUN = 'true';
|
||||
t.assert(util.isLocalRun());
|
||||
process.env.CODEQL_LOCAL_RUN = 'hucairz';
|
||||
t.assert(util.isLocalRun());
|
||||
process.env.CODEQL_LOCAL_RUN = origLocalRun;
|
||||
});
|
||||
ava_1.default('prepareEnvironment() when a local run', t => {
|
||||
const origLocalRun = process.env.CODEQL_LOCAL_RUN;
|
||||
process.env.CODEQL_LOCAL_RUN = 'false';
|
||||
process.env.GITHUB_JOB = 'YYY';
|
||||
util.prepareLocalRunEnvironment();
|
||||
// unchanged
|
||||
t.deepEqual(process.env.GITHUB_JOB, 'YYY');
|
||||
process.env.CODEQL_LOCAL_RUN = 'true';
|
||||
util.prepareLocalRunEnvironment();
|
||||
// unchanged
|
||||
t.deepEqual(process.env.GITHUB_JOB, 'YYY');
|
||||
process.env.GITHUB_JOB = '';
|
||||
util.prepareLocalRunEnvironment();
|
||||
// updated
|
||||
t.deepEqual(process.env.GITHUB_JOB, 'UNKNOWN-JOB');
|
||||
process.env.CODEQL_LOCAL_RUN = origLocalRun;
|
||||
});
|
||||
ava_1.default('getExtraOptionsEnvParam() succeeds on valid JSON with invalid options (for now)', t => {
|
||||
const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
|
||||
const options = { foo: 42 };
|
||||
process.env.CODEQL_ACTION_EXTRA_OPTIONS = JSON.stringify(options);
|
||||
t.deepEqual(util.getExtraOptionsEnvParam(), options);
|
||||
process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
|
||||
});
|
||||
ava_1.default('getExtraOptionsEnvParam() succeeds on valid options', t => {
|
||||
const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
|
||||
const options = { database: { init: ["--debug"] } };
|
||||
process.env.CODEQL_ACTION_EXTRA_OPTIONS =
|
||||
JSON.stringify(options);
|
||||
t.deepEqual(util.getExtraOptionsEnvParam(), options);
|
||||
process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
|
||||
});
|
||||
ava_1.default('getExtraOptionsEnvParam() fails on invalid JSON', t => {
|
||||
const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
|
||||
process.env.CODEQL_ACTION_EXTRA_OPTIONS = "{{invalid-json}}";
|
||||
t.throws(util.getExtraOptionsEnvParam);
|
||||
process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
|
||||
});
|
||||
ava_1.default('getFileContentsUsingAPI() throws if the request does not succeed', async (t) => {
|
||||
const spyGetContents = testingUtils.mockGetContents({}, 400);
|
||||
try {
|
||||
await util.getFileContentsUsingAPI('github', 'codeql-action', 'non-existing-file', 'main');
|
||||
throw new Error('initConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.assert(spyGetContents.called);
|
||||
t.deepEqual(err, new Error(util.fileDownloadError('github/codeql-action/non-existing-file@main')));
|
||||
}
|
||||
});
|
||||
ava_1.default('getFileContentsUsingAPI() throws if the requested file is a directory', async (t) => {
|
||||
const dummyResponse = []; // directories are returned as arrays
|
||||
const spyGetContents = testingUtils.mockGetContents(dummyResponse, 200);
|
||||
try {
|
||||
await util.getFileContentsUsingAPI('github', 'codeql-action', 'non-existing-file', 'main');
|
||||
throw new Error('initConfig did not throw error');
|
||||
}
|
||||
catch (err) {
|
||||
t.assert(spyGetContents.called);
|
||||
t.deepEqual(err, new Error(util.fileIsADirectoryError('github/codeql-action/non-existing-file@main')));
|
||||
}
|
||||
});
|
||||
ava_1.default('getFileContentsUsingAPI() returns the right content', async (t) => {
|
||||
const inputFileContents = `content content content`;
|
||||
const dummyResponse = {
|
||||
content: Buffer.from(inputFileContents).toString("base64"),
|
||||
};
|
||||
const spyGetContents = testingUtils.mockGetContents(dummyResponse, 200);
|
||||
const content = await util.getFileContentsUsingAPI('github', 'codeql-action', 'non-existing-file', 'main');
|
||||
t.deepEqual(content, inputFileContents);
|
||||
t.assert(spyGetContents.called);
|
||||
});
|
||||
//# sourceMappingURL=util.test.js.map
|
||||
File diff suppressed because one or more lines are too long
1
node_modules/.bin/atob
generated
vendored
Symbolic link
1
node_modules/.bin/atob
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../atob/bin/atob.js
|
||||
1
node_modules/.bin/errno
generated
vendored
Symbolic link
1
node_modules/.bin/errno
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../errno/cli.js
|
||||
1
node_modules/.bin/json5
generated
vendored
Symbolic link
1
node_modules/.bin/json5
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../json5/lib/cli.js
|
||||
1
node_modules/.bin/miller-rabin
generated
vendored
Symbolic link
1
node_modules/.bin/miller-rabin
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../miller-rabin/bin/miller-rabin
|
||||
1
node_modules/.bin/sha.js
generated
vendored
Symbolic link
1
node_modules/.bin/sha.js
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../sha.js/bin.js
|
||||
1
node_modules/.bin/terser
generated
vendored
Symbolic link
1
node_modules/.bin/terser
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../terser/bin/terser
|
||||
1
node_modules/.bin/webpack
generated
vendored
Symbolic link
1
node_modules/.bin/webpack
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../webpack/bin/webpack.js
|
||||
1
node_modules/.bin/webpack-cli
generated
vendored
Symbolic link
1
node_modules/.bin/webpack-cli
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../webpack-cli/bin/cli.js
|
||||
21
node_modules/@webassemblyjs/ast/LICENSE
generated
vendored
Normal file
21
node_modules/@webassemblyjs/ast/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2018 Sven Sauleau <sven@sauleau.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
167
node_modules/@webassemblyjs/ast/README.md
generated
vendored
Normal file
167
node_modules/@webassemblyjs/ast/README.md
generated
vendored
Normal file
@@ -0,0 +1,167 @@
|
||||
# @webassemblyjs/ast
|
||||
|
||||
> AST utils for webassemblyjs
|
||||
|
||||
## Installation
|
||||
|
||||
```sh
|
||||
yarn add @webassemblyjs/ast
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### Traverse
|
||||
|
||||
```js
|
||||
import { traverse } from "@webassemblyjs/ast";
|
||||
|
||||
traverse(ast, {
|
||||
Module(path) {
|
||||
console.log(path.node);
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
### Instruction signatures
|
||||
|
||||
```js
|
||||
import { signatures } from "@webassemblyjs/ast";
|
||||
|
||||
console.log(signatures);
|
||||
```
|
||||
|
||||
### Path methods
|
||||
|
||||
- `findParent: NodeLocator`
|
||||
- `replaceWith: Node => void`
|
||||
- `remove: () => void`
|
||||
- `insertBefore: Node => void`
|
||||
- `insertAfter: Node => void`
|
||||
- `stop: () => void`
|
||||
|
||||
### AST utils
|
||||
|
||||
- function `module(id, fields, metadata)`
|
||||
- function `moduleMetadata(sections, functionNames, localNames)`
|
||||
- function `moduleNameMetadata(value)`
|
||||
- function `functionNameMetadata(value, index)`
|
||||
- function `localNameMetadata(value, localIndex, functionIndex)`
|
||||
- function `binaryModule(id, blob)`
|
||||
- function `quoteModule(id, string)`
|
||||
- function `sectionMetadata(section, startOffset, size, vectorOfSize)`
|
||||
- function `loopInstruction(label, resulttype, instr)`
|
||||
- function `instruction(id, args, namedArgs)`
|
||||
- function `objectInstruction(id, object, args, namedArgs)`
|
||||
- function `ifInstruction(testLabel, test, result, consequent, alternate)`
|
||||
- function `stringLiteral(value)`
|
||||
- function `numberLiteralFromRaw(value, raw)`
|
||||
- function `longNumberLiteral(value, raw)`
|
||||
- function `floatLiteral(value, nan, inf, raw)`
|
||||
- function `elem(table, offset, funcs)`
|
||||
- function `indexInFuncSection(index)`
|
||||
- function `valtypeLiteral(name)`
|
||||
- function `typeInstruction(id, functype)`
|
||||
- function `start(index)`
|
||||
- function `globalType(valtype, mutability)`
|
||||
- function `leadingComment(value)`
|
||||
- function `blockComment(value)`
|
||||
- function `data(memoryIndex, offset, init)`
|
||||
- function `global(globalType, init, name)`
|
||||
- function `table(elementType, limits, name, elements)`
|
||||
- function `memory(limits, id)`
|
||||
- function `funcImportDescr(id, signature)`
|
||||
- function `moduleImport(module, name, descr)`
|
||||
- function `moduleExportDescr(exportType, id)`
|
||||
- function `moduleExport(name, descr)`
|
||||
- function `limit(min, max)`
|
||||
- function `signature(params, results)`
|
||||
- function `program(body)`
|
||||
- function `identifier(value, raw)`
|
||||
- function `blockInstruction(label, instr, result)`
|
||||
- function `callInstruction(index, instrArgs)`
|
||||
- function `callIndirectInstruction(signature, intrs)`
|
||||
- function `byteArray(values)`
|
||||
- function `func(name, signature, body, isExternal, metadata)`
|
||||
- Constant`isModule`
|
||||
- Constant`isModuleMetadata`
|
||||
- Constant`isModuleNameMetadata`
|
||||
- Constant`isFunctionNameMetadata`
|
||||
- Constant`isLocalNameMetadata`
|
||||
- Constant`isBinaryModule`
|
||||
- Constant`isQuoteModule`
|
||||
- Constant`isSectionMetadata`
|
||||
- Constant`isLoopInstruction`
|
||||
- Constant`isInstruction`
|
||||
- Constant`isObjectInstruction`
|
||||
- Constant`isIfInstruction`
|
||||
- Constant`isStringLiteral`
|
||||
- Constant`isNumberLiteral`
|
||||
- Constant`isLongNumberLiteral`
|
||||
- Constant`isFloatLiteral`
|
||||
- Constant`isElem`
|
||||
- Constant`isIndexInFuncSection`
|
||||
- Constant`isValtypeLiteral`
|
||||
- Constant`isTypeInstruction`
|
||||
- Constant`isStart`
|
||||
- Constant`isGlobalType`
|
||||
- Constant`isLeadingComment`
|
||||
- Constant`isBlockComment`
|
||||
- Constant`isData`
|
||||
- Constant`isGlobal`
|
||||
- Constant`isTable`
|
||||
- Constant`isMemory`
|
||||
- Constant`isFuncImportDescr`
|
||||
- Constant`isModuleImport`
|
||||
- Constant`isModuleExportDescr`
|
||||
- Constant`isModuleExport`
|
||||
- Constant`isLimit`
|
||||
- Constant`isSignature`
|
||||
- Constant`isProgram`
|
||||
- Constant`isIdentifier`
|
||||
- Constant`isBlockInstruction`
|
||||
- Constant`isCallInstruction`
|
||||
- Constant`isCallIndirectInstruction`
|
||||
- Constant`isByteArray`
|
||||
- Constant`isFunc`
|
||||
- Constant`assertModule`
|
||||
- Constant`assertModuleMetadata`
|
||||
- Constant`assertModuleNameMetadata`
|
||||
- Constant`assertFunctionNameMetadata`
|
||||
- Constant`assertLocalNameMetadata`
|
||||
- Constant`assertBinaryModule`
|
||||
- Constant`assertQuoteModule`
|
||||
- Constant`assertSectionMetadata`
|
||||
- Constant`assertLoopInstruction`
|
||||
- Constant`assertInstruction`
|
||||
- Constant`assertObjectInstruction`
|
||||
- Constant`assertIfInstruction`
|
||||
- Constant`assertStringLiteral`
|
||||
- Constant`assertNumberLiteral`
|
||||
- Constant`assertLongNumberLiteral`
|
||||
- Constant`assertFloatLiteral`
|
||||
- Constant`assertElem`
|
||||
- Constant`assertIndexInFuncSection`
|
||||
- Constant`assertValtypeLiteral`
|
||||
- Constant`assertTypeInstruction`
|
||||
- Constant`assertStart`
|
||||
- Constant`assertGlobalType`
|
||||
- Constant`assertLeadingComment`
|
||||
- Constant`assertBlockComment`
|
||||
- Constant`assertData`
|
||||
- Constant`assertGlobal`
|
||||
- Constant`assertTable`
|
||||
- Constant`assertMemory`
|
||||
- Constant`assertFuncImportDescr`
|
||||
- Constant`assertModuleImport`
|
||||
- Constant`assertModuleExportDescr`
|
||||
- Constant`assertModuleExport`
|
||||
- Constant`assertLimit`
|
||||
- Constant`assertSignature`
|
||||
- Constant`assertProgram`
|
||||
- Constant`assertIdentifier`
|
||||
- Constant`assertBlockInstruction`
|
||||
- Constant`assertCallInstruction`
|
||||
- Constant`assertCallIndirectInstruction`
|
||||
- Constant`assertByteArray`
|
||||
- Constant`assertFunc`
|
||||
|
||||
10
node_modules/@webassemblyjs/ast/esm/clone.js
generated
vendored
Normal file
10
node_modules/@webassemblyjs/ast/esm/clone.js
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
export function cloneNode(n) {
|
||||
// $FlowIgnore
|
||||
var newObj = {};
|
||||
|
||||
for (var k in n) {
|
||||
newObj[k] = n[k];
|
||||
}
|
||||
|
||||
return newObj;
|
||||
}
|
||||
663
node_modules/@webassemblyjs/ast/esm/definitions.js
generated
vendored
Normal file
663
node_modules/@webassemblyjs/ast/esm/definitions.js
generated
vendored
Normal file
@@ -0,0 +1,663 @@
|
||||
var definitions = {};
|
||||
|
||||
function defineType(typeName, metadata) {
|
||||
definitions[typeName] = metadata;
|
||||
}
|
||||
|
||||
defineType("Module", {
|
||||
spec: {
|
||||
wasm: "https://webassembly.github.io/spec/core/binary/modules.html#binary-module",
|
||||
wat: "https://webassembly.github.io/spec/core/text/modules.html#text-module"
|
||||
},
|
||||
doc: "A module consists of a sequence of sections (termed fields in the text format).",
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
id: {
|
||||
maybe: true,
|
||||
type: "string"
|
||||
},
|
||||
fields: {
|
||||
array: true,
|
||||
type: "Node"
|
||||
},
|
||||
metadata: {
|
||||
optional: true,
|
||||
type: "ModuleMetadata"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ModuleMetadata", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
sections: {
|
||||
array: true,
|
||||
type: "SectionMetadata"
|
||||
},
|
||||
functionNames: {
|
||||
optional: true,
|
||||
array: true,
|
||||
type: "FunctionNameMetadata"
|
||||
},
|
||||
localNames: {
|
||||
optional: true,
|
||||
array: true,
|
||||
type: "ModuleMetadata"
|
||||
},
|
||||
producers: {
|
||||
optional: true,
|
||||
array: true,
|
||||
type: "ProducersSectionMetadata"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ModuleNameMetadata", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("FunctionNameMetadata", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "string"
|
||||
},
|
||||
index: {
|
||||
type: "number"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("LocalNameMetadata", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "string"
|
||||
},
|
||||
localIndex: {
|
||||
type: "number"
|
||||
},
|
||||
functionIndex: {
|
||||
type: "number"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("BinaryModule", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
id: {
|
||||
maybe: true,
|
||||
type: "string"
|
||||
},
|
||||
blob: {
|
||||
array: true,
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("QuoteModule", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
id: {
|
||||
maybe: true,
|
||||
type: "string"
|
||||
},
|
||||
string: {
|
||||
array: true,
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("SectionMetadata", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
section: {
|
||||
type: "SectionName"
|
||||
},
|
||||
startOffset: {
|
||||
type: "number"
|
||||
},
|
||||
size: {
|
||||
type: "NumberLiteral"
|
||||
},
|
||||
vectorOfSize: {
|
||||
comment: "Size of the vector in the section (if any)",
|
||||
type: "NumberLiteral"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ProducersSectionMetadata", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
producers: {
|
||||
array: true,
|
||||
type: "ProducerMetadata"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ProducerMetadata", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
language: {
|
||||
type: "ProducerMetadataVersionedName",
|
||||
array: true
|
||||
},
|
||||
processedBy: {
|
||||
type: "ProducerMetadataVersionedName",
|
||||
array: true
|
||||
},
|
||||
sdk: {
|
||||
type: "ProducerMetadataVersionedName",
|
||||
array: true
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ProducerMetadataVersionedName", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
name: {
|
||||
type: "string"
|
||||
},
|
||||
version: {
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
/*
|
||||
Instructions
|
||||
*/
|
||||
|
||||
defineType("LoopInstruction", {
|
||||
unionType: ["Node", "Block", "Instruction"],
|
||||
fields: {
|
||||
id: {
|
||||
constant: true,
|
||||
type: "string",
|
||||
value: "loop"
|
||||
},
|
||||
label: {
|
||||
maybe: true,
|
||||
type: "Identifier"
|
||||
},
|
||||
resulttype: {
|
||||
maybe: true,
|
||||
type: "Valtype"
|
||||
},
|
||||
instr: {
|
||||
array: true,
|
||||
type: "Instruction"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Instr", {
|
||||
unionType: ["Node", "Expression", "Instruction"],
|
||||
fields: {
|
||||
id: {
|
||||
type: "string"
|
||||
},
|
||||
object: {
|
||||
optional: true,
|
||||
type: "Valtype"
|
||||
},
|
||||
args: {
|
||||
array: true,
|
||||
type: "Expression"
|
||||
},
|
||||
namedArgs: {
|
||||
optional: true,
|
||||
type: "Object"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("IfInstruction", {
|
||||
unionType: ["Node", "Instruction"],
|
||||
fields: {
|
||||
id: {
|
||||
constant: true,
|
||||
type: "string",
|
||||
value: "if"
|
||||
},
|
||||
testLabel: {
|
||||
comment: "only for WAST",
|
||||
type: "Identifier"
|
||||
},
|
||||
test: {
|
||||
array: true,
|
||||
type: "Instruction"
|
||||
},
|
||||
result: {
|
||||
maybe: true,
|
||||
type: "Valtype"
|
||||
},
|
||||
consequent: {
|
||||
array: true,
|
||||
type: "Instruction"
|
||||
},
|
||||
alternate: {
|
||||
array: true,
|
||||
type: "Instruction"
|
||||
}
|
||||
}
|
||||
});
|
||||
/*
|
||||
Concrete value types
|
||||
*/
|
||||
|
||||
defineType("StringLiteral", {
|
||||
unionType: ["Node", "Expression"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("NumberLiteral", {
|
||||
unionType: ["Node", "NumericLiteral", "Expression"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "number"
|
||||
},
|
||||
raw: {
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("LongNumberLiteral", {
|
||||
unionType: ["Node", "NumericLiteral", "Expression"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "LongNumber"
|
||||
},
|
||||
raw: {
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("FloatLiteral", {
|
||||
unionType: ["Node", "NumericLiteral", "Expression"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "number"
|
||||
},
|
||||
nan: {
|
||||
optional: true,
|
||||
type: "boolean"
|
||||
},
|
||||
inf: {
|
||||
optional: true,
|
||||
type: "boolean"
|
||||
},
|
||||
raw: {
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Elem", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
table: {
|
||||
type: "Index"
|
||||
},
|
||||
offset: {
|
||||
array: true,
|
||||
type: "Instruction"
|
||||
},
|
||||
funcs: {
|
||||
array: true,
|
||||
type: "Index"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("IndexInFuncSection", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
index: {
|
||||
type: "Index"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ValtypeLiteral", {
|
||||
unionType: ["Node", "Expression"],
|
||||
fields: {
|
||||
name: {
|
||||
type: "Valtype"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("TypeInstruction", {
|
||||
unionType: ["Node", "Instruction"],
|
||||
fields: {
|
||||
id: {
|
||||
maybe: true,
|
||||
type: "Index"
|
||||
},
|
||||
functype: {
|
||||
type: "Signature"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Start", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
index: {
|
||||
type: "Index"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("GlobalType", {
|
||||
unionType: ["Node", "ImportDescr"],
|
||||
fields: {
|
||||
valtype: {
|
||||
type: "Valtype"
|
||||
},
|
||||
mutability: {
|
||||
type: "Mutability"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("LeadingComment", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("BlockComment", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Data", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
memoryIndex: {
|
||||
type: "Memidx"
|
||||
},
|
||||
offset: {
|
||||
type: "Instruction"
|
||||
},
|
||||
init: {
|
||||
type: "ByteArray"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Global", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
globalType: {
|
||||
type: "GlobalType"
|
||||
},
|
||||
init: {
|
||||
array: true,
|
||||
type: "Instruction"
|
||||
},
|
||||
name: {
|
||||
maybe: true,
|
||||
type: "Identifier"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Table", {
|
||||
unionType: ["Node", "ImportDescr"],
|
||||
fields: {
|
||||
elementType: {
|
||||
type: "TableElementType"
|
||||
},
|
||||
limits: {
|
||||
assertNodeType: true,
|
||||
type: "Limit"
|
||||
},
|
||||
name: {
|
||||
maybe: true,
|
||||
type: "Identifier"
|
||||
},
|
||||
elements: {
|
||||
array: true,
|
||||
optional: true,
|
||||
type: "Index"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Memory", {
|
||||
unionType: ["Node", "ImportDescr"],
|
||||
fields: {
|
||||
limits: {
|
||||
type: "Limit"
|
||||
},
|
||||
id: {
|
||||
maybe: true,
|
||||
type: "Index"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("FuncImportDescr", {
|
||||
unionType: ["Node", "ImportDescr"],
|
||||
fields: {
|
||||
id: {
|
||||
type: "Identifier"
|
||||
},
|
||||
signature: {
|
||||
type: "Signature"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ModuleImport", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
module: {
|
||||
type: "string"
|
||||
},
|
||||
name: {
|
||||
type: "string"
|
||||
},
|
||||
descr: {
|
||||
type: "ImportDescr"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ModuleExportDescr", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
exportType: {
|
||||
type: "ExportDescrType"
|
||||
},
|
||||
id: {
|
||||
type: "Index"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ModuleExport", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
name: {
|
||||
type: "string"
|
||||
},
|
||||
descr: {
|
||||
type: "ModuleExportDescr"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Limit", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
min: {
|
||||
type: "number"
|
||||
},
|
||||
max: {
|
||||
optional: true,
|
||||
type: "number"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Signature", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
params: {
|
||||
array: true,
|
||||
type: "FuncParam"
|
||||
},
|
||||
results: {
|
||||
array: true,
|
||||
type: "Valtype"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Program", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
body: {
|
||||
array: true,
|
||||
type: "Node"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Identifier", {
|
||||
unionType: ["Node", "Expression"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "string"
|
||||
},
|
||||
raw: {
|
||||
optional: true,
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("BlockInstruction", {
|
||||
unionType: ["Node", "Block", "Instruction"],
|
||||
fields: {
|
||||
id: {
|
||||
constant: true,
|
||||
type: "string",
|
||||
value: "block"
|
||||
},
|
||||
label: {
|
||||
maybe: true,
|
||||
type: "Identifier"
|
||||
},
|
||||
instr: {
|
||||
array: true,
|
||||
type: "Instruction"
|
||||
},
|
||||
result: {
|
||||
maybe: true,
|
||||
type: "Valtype"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("CallInstruction", {
|
||||
unionType: ["Node", "Instruction"],
|
||||
fields: {
|
||||
id: {
|
||||
constant: true,
|
||||
type: "string",
|
||||
value: "call"
|
||||
},
|
||||
index: {
|
||||
type: "Index"
|
||||
},
|
||||
instrArgs: {
|
||||
array: true,
|
||||
optional: true,
|
||||
type: "Expression"
|
||||
},
|
||||
numeric: {
|
||||
type: "Index",
|
||||
optional: true
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("CallIndirectInstruction", {
|
||||
unionType: ["Node", "Instruction"],
|
||||
fields: {
|
||||
id: {
|
||||
constant: true,
|
||||
type: "string",
|
||||
value: "call_indirect"
|
||||
},
|
||||
signature: {
|
||||
type: "SignatureOrTypeRef"
|
||||
},
|
||||
intrs: {
|
||||
array: true,
|
||||
optional: true,
|
||||
type: "Expression"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ByteArray", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
values: {
|
||||
array: true,
|
||||
type: "Byte"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Func", {
|
||||
unionType: ["Node", "Block"],
|
||||
fields: {
|
||||
name: {
|
||||
maybe: true,
|
||||
type: "Index"
|
||||
},
|
||||
signature: {
|
||||
type: "SignatureOrTypeRef"
|
||||
},
|
||||
body: {
|
||||
array: true,
|
||||
type: "Instruction"
|
||||
},
|
||||
isExternal: {
|
||||
comment: "means that it has been imported from the outside js",
|
||||
optional: true,
|
||||
type: "boolean"
|
||||
},
|
||||
metadata: {
|
||||
optional: true,
|
||||
type: "FuncMetadata"
|
||||
}
|
||||
}
|
||||
});
|
||||
/**
|
||||
* Intrinsics
|
||||
*/
|
||||
|
||||
defineType("InternalBrUnless", {
|
||||
unionType: ["Node", "Intrinsic"],
|
||||
fields: {
|
||||
target: {
|
||||
type: "number"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("InternalGoto", {
|
||||
unionType: ["Node", "Intrinsic"],
|
||||
fields: {
|
||||
target: {
|
||||
type: "number"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("InternalCallExtern", {
|
||||
unionType: ["Node", "Intrinsic"],
|
||||
fields: {
|
||||
target: {
|
||||
type: "number"
|
||||
}
|
||||
}
|
||||
}); // function bodies are terminated by an `end` instruction but are missing a
|
||||
// return instruction
|
||||
//
|
||||
// Since we can't inject a new instruction we are injecting a new instruction.
|
||||
|
||||
defineType("InternalEndAndReturn", {
|
||||
unionType: ["Node", "Intrinsic"],
|
||||
fields: {}
|
||||
});
|
||||
module.exports = definitions;
|
||||
6
node_modules/@webassemblyjs/ast/esm/index.js
generated
vendored
Normal file
6
node_modules/@webassemblyjs/ast/esm/index.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
export * from "./nodes";
|
||||
export { numberLiteralFromRaw, withLoc, withRaw, funcParam, indexLiteral, memIndexLiteral, instruction, objectInstruction } from "./node-helpers.js";
|
||||
export { traverse } from "./traverse";
|
||||
export { signatures } from "./signatures";
|
||||
export * from "./utils";
|
||||
export { cloneNode } from "./clone";
|
||||
84
node_modules/@webassemblyjs/ast/esm/node-helpers.js
generated
vendored
Normal file
84
node_modules/@webassemblyjs/ast/esm/node-helpers.js
generated
vendored
Normal file
@@ -0,0 +1,84 @@
|
||||
import { parse32F, parse64F, parse32I, parse64I, parseU32, isNanLiteral, isInfLiteral } from "@webassemblyjs/wast-parser";
|
||||
import { longNumberLiteral, floatLiteral, numberLiteral, instr } from "./nodes";
|
||||
export function numberLiteralFromRaw(rawValue) {
|
||||
var instructionType = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : "i32";
|
||||
var original = rawValue; // Remove numeric separators _
|
||||
|
||||
if (typeof rawValue === "string") {
|
||||
rawValue = rawValue.replace(/_/g, "");
|
||||
}
|
||||
|
||||
if (typeof rawValue === "number") {
|
||||
return numberLiteral(rawValue, String(original));
|
||||
} else {
|
||||
switch (instructionType) {
|
||||
case "i32":
|
||||
{
|
||||
return numberLiteral(parse32I(rawValue), String(original));
|
||||
}
|
||||
|
||||
case "u32":
|
||||
{
|
||||
return numberLiteral(parseU32(rawValue), String(original));
|
||||
}
|
||||
|
||||
case "i64":
|
||||
{
|
||||
return longNumberLiteral(parse64I(rawValue), String(original));
|
||||
}
|
||||
|
||||
case "f32":
|
||||
{
|
||||
return floatLiteral(parse32F(rawValue), isNanLiteral(rawValue), isInfLiteral(rawValue), String(original));
|
||||
}
|
||||
// f64
|
||||
|
||||
default:
|
||||
{
|
||||
return floatLiteral(parse64F(rawValue), isNanLiteral(rawValue), isInfLiteral(rawValue), String(original));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
export function instruction(id) {
|
||||
var args = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : [];
|
||||
var namedArgs = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
|
||||
return instr(id, undefined, args, namedArgs);
|
||||
}
|
||||
export function objectInstruction(id, object) {
|
||||
var args = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : [];
|
||||
var namedArgs = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};
|
||||
return instr(id, object, args, namedArgs);
|
||||
}
|
||||
/**
|
||||
* Decorators
|
||||
*/
|
||||
|
||||
export function withLoc(n, end, start) {
|
||||
var loc = {
|
||||
start: start,
|
||||
end: end
|
||||
};
|
||||
n.loc = loc;
|
||||
return n;
|
||||
}
|
||||
export function withRaw(n, raw) {
|
||||
n.raw = raw;
|
||||
return n;
|
||||
}
|
||||
export function funcParam(valtype, id) {
|
||||
return {
|
||||
id: id,
|
||||
valtype: valtype
|
||||
};
|
||||
}
|
||||
export function indexLiteral(value) {
|
||||
// $FlowIgnore
|
||||
var x = numberLiteralFromRaw(value, "u32");
|
||||
return x;
|
||||
}
|
||||
export function memIndexLiteral(value) {
|
||||
// $FlowIgnore
|
||||
var x = numberLiteralFromRaw(value, "u32");
|
||||
return x;
|
||||
}
|
||||
137
node_modules/@webassemblyjs/ast/esm/node-path.js
generated
vendored
Normal file
137
node_modules/@webassemblyjs/ast/esm/node-path.js
generated
vendored
Normal file
@@ -0,0 +1,137 @@
|
||||
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
|
||||
|
||||
function findParent(_ref, cb) {
|
||||
var parentPath = _ref.parentPath;
|
||||
|
||||
if (parentPath == null) {
|
||||
throw new Error("node is root");
|
||||
}
|
||||
|
||||
var currentPath = parentPath;
|
||||
|
||||
while (cb(currentPath) !== false) {
|
||||
// Hit the root node, stop
|
||||
// $FlowIgnore
|
||||
if (currentPath.parentPath == null) {
|
||||
return null;
|
||||
} // $FlowIgnore
|
||||
|
||||
|
||||
currentPath = currentPath.parentPath;
|
||||
}
|
||||
|
||||
return currentPath.node;
|
||||
}
|
||||
|
||||
function insertBefore(context, newNode) {
|
||||
return insert(context, newNode);
|
||||
}
|
||||
|
||||
function insertAfter(context, newNode) {
|
||||
return insert(context, newNode, 1);
|
||||
}
|
||||
|
||||
function insert(_ref2, newNode) {
|
||||
var node = _ref2.node,
|
||||
inList = _ref2.inList,
|
||||
parentPath = _ref2.parentPath,
|
||||
parentKey = _ref2.parentKey;
|
||||
var indexOffset = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
|
||||
|
||||
if (!inList) {
|
||||
throw new Error('inList' + " error: " + ("insert can only be used for nodes that are within lists" || "unknown"));
|
||||
}
|
||||
|
||||
if (!(parentPath != null)) {
|
||||
throw new Error('parentPath != null' + " error: " + ("Can not remove root node" || "unknown"));
|
||||
}
|
||||
|
||||
// $FlowIgnore
|
||||
var parentList = parentPath.node[parentKey];
|
||||
var indexInList = parentList.findIndex(function (n) {
|
||||
return n === node;
|
||||
});
|
||||
parentList.splice(indexInList + indexOffset, 0, newNode);
|
||||
}
|
||||
|
||||
function remove(_ref3) {
|
||||
var node = _ref3.node,
|
||||
parentKey = _ref3.parentKey,
|
||||
parentPath = _ref3.parentPath;
|
||||
|
||||
if (!(parentPath != null)) {
|
||||
throw new Error('parentPath != null' + " error: " + ("Can not remove root node" || "unknown"));
|
||||
}
|
||||
|
||||
// $FlowIgnore
|
||||
var parentNode = parentPath.node; // $FlowIgnore
|
||||
|
||||
var parentProperty = parentNode[parentKey];
|
||||
|
||||
if (Array.isArray(parentProperty)) {
|
||||
// $FlowIgnore
|
||||
parentNode[parentKey] = parentProperty.filter(function (n) {
|
||||
return n !== node;
|
||||
});
|
||||
} else {
|
||||
// $FlowIgnore
|
||||
delete parentNode[parentKey];
|
||||
}
|
||||
|
||||
node._deleted = true;
|
||||
}
|
||||
|
||||
function stop(context) {
|
||||
context.shouldStop = true;
|
||||
}
|
||||
|
||||
function replaceWith(context, newNode) {
|
||||
// $FlowIgnore
|
||||
var parentNode = context.parentPath.node; // $FlowIgnore
|
||||
|
||||
var parentProperty = parentNode[context.parentKey];
|
||||
|
||||
if (Array.isArray(parentProperty)) {
|
||||
var indexInList = parentProperty.findIndex(function (n) {
|
||||
return n === context.node;
|
||||
});
|
||||
parentProperty.splice(indexInList, 1, newNode);
|
||||
} else {
|
||||
// $FlowIgnore
|
||||
parentNode[context.parentKey] = newNode;
|
||||
}
|
||||
|
||||
context.node._deleted = true;
|
||||
context.node = newNode;
|
||||
} // bind the context to the first argument of node operations
|
||||
|
||||
|
||||
function bindNodeOperations(operations, context) {
|
||||
var keys = Object.keys(operations);
|
||||
var boundOperations = {};
|
||||
keys.forEach(function (key) {
|
||||
boundOperations[key] = operations[key].bind(null, context);
|
||||
});
|
||||
return boundOperations;
|
||||
}
|
||||
|
||||
function createPathOperations(context) {
|
||||
// $FlowIgnore
|
||||
return bindNodeOperations({
|
||||
findParent: findParent,
|
||||
replaceWith: replaceWith,
|
||||
remove: remove,
|
||||
insertBefore: insertBefore,
|
||||
insertAfter: insertAfter,
|
||||
stop: stop
|
||||
}, context);
|
||||
}
|
||||
|
||||
export function createPath(context) {
|
||||
var path = _extends({}, context); // $FlowIgnore
|
||||
|
||||
|
||||
Object.assign(path, createPathOperations(path)); // $FlowIgnore
|
||||
|
||||
return path;
|
||||
}
|
||||
915
node_modules/@webassemblyjs/ast/esm/nodes.js
generated
vendored
Normal file
915
node_modules/@webassemblyjs/ast/esm/nodes.js
generated
vendored
Normal file
@@ -0,0 +1,915 @@
|
||||
function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
|
||||
|
||||
// THIS FILE IS AUTOGENERATED
|
||||
// see scripts/generateNodeUtils.js
|
||||
function isTypeOf(t) {
|
||||
return function (n) {
|
||||
return n.type === t;
|
||||
};
|
||||
}
|
||||
|
||||
function assertTypeOf(t) {
|
||||
return function (n) {
|
||||
return function () {
|
||||
if (!(n.type === t)) {
|
||||
throw new Error('n.type === t' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
}();
|
||||
};
|
||||
}
|
||||
|
||||
export function module(id, fields, metadata) {
|
||||
if (id !== null && id !== undefined) {
|
||||
if (!(typeof id === "string")) {
|
||||
throw new Error('typeof id === "string"' + " error: " + ("Argument id must be of type string, given: " + _typeof(id) || "unknown"));
|
||||
}
|
||||
}
|
||||
|
||||
if (!(_typeof(fields) === "object" && typeof fields.length !== "undefined")) {
|
||||
throw new Error('typeof fields === "object" && typeof fields.length !== "undefined"' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
var node = {
|
||||
type: "Module",
|
||||
id: id,
|
||||
fields: fields
|
||||
};
|
||||
|
||||
if (typeof metadata !== "undefined") {
|
||||
node.metadata = metadata;
|
||||
}
|
||||
|
||||
return node;
|
||||
}
|
||||
export function moduleMetadata(sections, functionNames, localNames, producers) {
|
||||
if (!(_typeof(sections) === "object" && typeof sections.length !== "undefined")) {
|
||||
throw new Error('typeof sections === "object" && typeof sections.length !== "undefined"' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
if (functionNames !== null && functionNames !== undefined) {
|
||||
if (!(_typeof(functionNames) === "object" && typeof functionNames.length !== "undefined")) {
|
||||
throw new Error('typeof functionNames === "object" && typeof functionNames.length !== "undefined"' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
}
|
||||
|
||||
if (localNames !== null && localNames !== undefined) {
|
||||
if (!(_typeof(localNames) === "object" && typeof localNames.length !== "undefined")) {
|
||||
throw new Error('typeof localNames === "object" && typeof localNames.length !== "undefined"' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
}
|
||||
|
||||
if (producers !== null && producers !== undefined) {
|
||||
if (!(_typeof(producers) === "object" && typeof producers.length !== "undefined")) {
|
||||
throw new Error('typeof producers === "object" && typeof producers.length !== "undefined"' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
}
|
||||
|
||||
var node = {
|
||||
type: "ModuleMetadata",
|
||||
sections: sections
|
||||
};
|
||||
|
||||
if (typeof functionNames !== "undefined" && functionNames.length > 0) {
|
||||
node.functionNames = functionNames;
|
||||
}
|
||||
|
||||
if (typeof localNames !== "undefined" && localNames.length > 0) {
|
||||
node.localNames = localNames;
|
||||
}
|
||||
|
||||
if (typeof producers !== "undefined" && producers.length > 0) {
|
||||
node.producers = producers;
|
||||
}
|
||||
|
||||
return node;
|
||||
}
|
||||
export function moduleNameMetadata(value) {
|
||||
if (!(typeof value === "string")) {
|
||||
throw new Error('typeof value === "string"' + " error: " + ("Argument value must be of type string, given: " + _typeof(value) || "unknown"));
|
||||
}
|
||||
|
||||
var node = {
|
||||
type: "ModuleNameMetadata",
|
||||
value: value
|
||||
};
|
||||
return node;
|
||||
}
|
||||
export function functionNameMetadata(value, index) {
|
||||
if (!(typeof value === "string")) {
|
||||
throw new Error('typeof value === "string"' + " error: " + ("Argument value must be of type string, given: " + _typeof(value) || "unknown"));
|
||||
}
|
||||
|
||||
if (!(typeof index === "number")) {
|
||||
throw new Error('typeof index === "number"' + " error: " + ("Argument index must be of type number, given: " + _typeof(index) || "unknown"));
|
||||
}
|
||||
|
||||
var node = {
|
||||
type: "FunctionNameMetadata",
|
||||
value: value,
|
||||
index: index
|
||||
};
|
||||
return node;
|
||||
}
|
||||
export function localNameMetadata(value, localIndex, functionIndex) {
|
||||
if (!(typeof value === "string")) {
|
||||
throw new Error('typeof value === "string"' + " error: " + ("Argument value must be of type string, given: " + _typeof(value) || "unknown"));
|
||||
}
|
||||
|
||||
if (!(typeof localIndex === "number")) {
|
||||
throw new Error('typeof localIndex === "number"' + " error: " + ("Argument localIndex must be of type number, given: " + _typeof(localIndex) || "unknown"));
|
||||
}
|
||||
|
||||
if (!(typeof functionIndex === "number")) {
|
||||
throw new Error('typeof functionIndex === "number"' + " error: " + ("Argument functionIndex must be of type number, given: " + _typeof(functionIndex) || "unknown"));
|
||||
}
|
||||
|
||||
var node = {
|
||||
type: "LocalNameMetadata",
|
||||
value: value,
|
||||
localIndex: localIndex,
|
||||
functionIndex: functionIndex
|
||||
};
|
||||
return node;
|
||||
}
|
||||
export function binaryModule(id, blob) {
|
||||
if (id !== null && id !== undefined) {
|
||||
if (!(typeof id === "string")) {
|
||||
throw new Error('typeof id === "string"' + " error: " + ("Argument id must be of type string, given: " + _typeof(id) || "unknown"));
|
||||
}
|
||||
}
|
||||
|
||||
if (!(_typeof(blob) === "object" && typeof blob.length !== "undefined")) {
|
||||
throw new Error('typeof blob === "object" && typeof blob.length !== "undefined"' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
var node = {
|
||||
type: "BinaryModule",
|
||||
id: id,
|
||||
blob: blob
|
||||
};
|
||||
return node;
|
||||
}
|
||||
export function quoteModule(id, string) {
|
||||
if (id !== null && id !== undefined) {
|
||||
if (!(typeof id === "string")) {
|
||||
throw new Error('typeof id === "string"' + " error: " + ("Argument id must be of type string, given: " + _typeof(id) || "unknown"));
|
||||
}
|
||||
}
|
||||
|
||||
if (!(_typeof(string) === "object" && typeof string.length !== "undefined")) {
|
||||
throw new Error('typeof string === "object" && typeof string.length !== "undefined"' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
var node = {
|
||||
type: "QuoteModule",
|
||||
id: id,
|
||||
string: string
|
||||
};
|
||||
return node;
|
||||
}
|
||||
export function sectionMetadata(section, startOffset, size, vectorOfSize) {
|
||||
if (!(typeof startOffset === "number")) {
|
||||
throw new Error('typeof startOffset === "number"' + " error: " + ("Argument startOffset must be of type number, given: " + _typeof(startOffset) || "unknown"));
|
||||
}
|
||||
|
||||
var node = {
|
||||
type: "SectionMetadata",
|
||||
section: section,
|
||||
startOffset: startOffset,
|
||||
size: size,
|
||||
vectorOfSize: vectorOfSize
|
||||
};
|
||||
return node;
|
||||
}
|
||||
export function producersSectionMetadata(producers) {
|
||||
if (!(_typeof(producers) === "object" && typeof producers.length !== "undefined")) {
|
||||
throw new Error('typeof producers === "object" && typeof producers.length !== "undefined"' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
var node = {
|
||||
type: "ProducersSectionMetadata",
|
||||
producers: producers
|
||||
};
|
||||
return node;
|
||||
}
|
||||
export function producerMetadata(language, processedBy, sdk) {
|
||||
if (!(_typeof(language) === "object" && typeof language.length !== "undefined")) {
|
||||
throw new Error('typeof language === "object" && typeof language.length !== "undefined"' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
if (!(_typeof(processedBy) === "object" && typeof processedBy.length !== "undefined")) {
|
||||
throw new Error('typeof processedBy === "object" && typeof processedBy.length !== "undefined"' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
if (!(_typeof(sdk) === "object" && typeof sdk.length !== "undefined")) {
|
||||
throw new Error('typeof sdk === "object" && typeof sdk.length !== "undefined"' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
var node = {
|
||||
type: "ProducerMetadata",
|
||||
language: language,
|
||||
processedBy: processedBy,
|
||||
sdk: sdk
|
||||
};
|
||||
return node;
|
||||
}
|
||||
export function producerMetadataVersionedName(name, version) {
|
||||
if (!(typeof name === "string")) {
|
||||
throw new Error('typeof name === "string"' + " error: " + ("Argument name must be of type string, given: " + _typeof(name) || "unknown"));
|
||||
}
|
||||
|
||||
if (!(typeof version === "string")) {
|
||||
throw new Error('typeof version === "string"' + " error: " + ("Argument version must be of type string, given: " + _typeof(version) || "unknown"));
|
||||
}
|
||||
|
||||
var node = {
|
||||
type: "ProducerMetadataVersionedName",
|
||||
name: name,
|
||||
version: version
|
||||
};
|
||||
return node;
|
||||
}
|
||||
export function loopInstruction(label, resulttype, instr) {
|
||||
if (!(_typeof(instr) === "object" && typeof instr.length !== "undefined")) {
|
||||
throw new Error('typeof instr === "object" && typeof instr.length !== "undefined"' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
var node = {
|
||||
type: "LoopInstruction",
|
||||
id: "loop",
|
||||
label: label,
|
||||
resulttype: resulttype,
|
||||
instr: instr
|
||||
};
|
||||
return node;
|
||||
}
|
||||
export function instr(id, object, args, namedArgs) {
|
||||
if (!(typeof id === "string")) {
|
||||
throw new Error('typeof id === "string"' + " error: " + ("Argument id must be of type string, given: " + _typeof(id) || "unknown"));
|
||||
}
|
||||
|
||||
if (!(_typeof(args) === "object" && typeof args.length !== "undefined")) {
|
||||
throw new Error('typeof args === "object" && typeof args.length !== "undefined"' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
var node = {
|
||||
type: "Instr",
|
||||
id: id,
|
||||
args: args
|
||||
};
|
||||
|
||||
if (typeof object !== "undefined") {
|
||||
node.object = object;
|
||||
}
|
||||
|
||||
if (typeof namedArgs !== "undefined" && Object.keys(namedArgs).length !== 0) {
|
||||
node.namedArgs = namedArgs;
|
||||
}
|
||||
|
||||
return node;
|
||||
}
|
||||
/**
 * Builds an IfInstruction node.
 *
 * @param {Object} testLabel label of the condition block
 * @param {Array} test instructions producing the condition value
 * @param {Object} result optional result valtype
 * @param {Array} consequent instructions of the "then" branch
 * @param {Array} alternate instructions of the "else" branch
 * @returns {Object} the new AST node
 * @throws {Error} when test, consequent or alternate is not array-like
 */
export function ifInstruction(testLabel, test, result, consequent, alternate) {
  if (_typeof(test) !== "object" || typeof test.length === "undefined") {
    throw new Error('typeof test === "object" && typeof test.length !== "undefined"' + " error: " + (undefined || "unknown"));
  }

  if (_typeof(consequent) !== "object" || typeof consequent.length === "undefined") {
    throw new Error('typeof consequent === "object" && typeof consequent.length !== "undefined"' + " error: " + (undefined || "unknown"));
  }

  if (_typeof(alternate) !== "object" || typeof alternate.length === "undefined") {
    throw new Error('typeof alternate === "object" && typeof alternate.length !== "undefined"' + " error: " + (undefined || "unknown"));
  }

  return {
    type: "IfInstruction",
    id: "if",
    testLabel: testLabel,
    test: test,
    result: result,
    consequent: consequent,
    alternate: alternate
  };
}
|
||||
/**
 * Builds a StringLiteral node wrapping `value`.
 *
 * @param {string} value the literal's string content
 * @returns {Object} the new AST node
 * @throws {Error} when value is not a string
 */
export function stringLiteral(value) {
  if (typeof value !== "string") {
    throw new Error('typeof value === "string"' + " error: " + ("Argument value must be of type string, given: " + _typeof(value) || "unknown"));
  }

  return {
    type: "StringLiteral",
    value: value
  };
}
|
||||
/**
 * Builds a NumberLiteral node.
 *
 * @param {number} value numeric value
 * @param {string} raw original source text of the literal
 * @returns {Object} the new AST node
 * @throws {Error} when value is not a number or raw is not a string
 */
export function numberLiteral(value, raw) {
  if (typeof value !== "number") {
    throw new Error('typeof value === "number"' + " error: " + ("Argument value must be of type number, given: " + _typeof(value) || "unknown"));
  }

  if (typeof raw !== "string") {
    throw new Error('typeof raw === "string"' + " error: " + ("Argument raw must be of type string, given: " + _typeof(raw) || "unknown"));
  }

  return {
    type: "NumberLiteral",
    value: value,
    raw: raw
  };
}
|
||||
/**
 * Builds a LongNumberLiteral node (64-bit literal).
 *
 * @param {Object} value the long value; not validated here
 * @param {string} raw original source text of the literal
 * @returns {Object} the new AST node
 * @throws {Error} when raw is not a string
 */
export function longNumberLiteral(value, raw) {
  if (typeof raw !== "string") {
    throw new Error('typeof raw === "string"' + " error: " + ("Argument raw must be of type string, given: " + _typeof(raw) || "unknown"));
  }

  return {
    type: "LongNumberLiteral",
    value: value,
    raw: raw
  };
}
|
||||
/**
 * Builds a FloatLiteral node.
 *
 * @param {number} value numeric value
 * @param {boolean} [nan] when true, tags the node as a NaN literal
 * @param {boolean} [inf] when true, tags the node as an infinity literal
 * @param {string} raw original source text of the literal
 * @returns {Object} the new AST node
 * @throws {Error} when value is not a number, raw is not a string, or a
 *   provided nan/inf flag is not a boolean
 */
export function floatLiteral(value, nan, inf, raw) {
  if (typeof value !== "number") {
    throw new Error('typeof value === "number"' + " error: " + ("Argument value must be of type number, given: " + _typeof(value) || "unknown"));
  }

  // nan/inf are optional: only validated when neither null nor undefined.
  if (nan != null && typeof nan !== "boolean") {
    throw new Error('typeof nan === "boolean"' + " error: " + ("Argument nan must be of type boolean, given: " + _typeof(nan) || "unknown"));
  }

  if (inf != null && typeof inf !== "boolean") {
    throw new Error('typeof inf === "boolean"' + " error: " + ("Argument inf must be of type boolean, given: " + _typeof(inf) || "unknown"));
  }

  if (typeof raw !== "string") {
    throw new Error('typeof raw === "string"' + " error: " + ("Argument raw must be of type string, given: " + _typeof(raw) || "unknown"));
  }

  var node = {
    type: "FloatLiteral",
    value: value,
    raw: raw
  };

  // The flags are attached only when strictly true.
  if (nan === true) {
    node.nan = true;
  }

  if (inf === true) {
    node.inf = true;
  }

  return node;
}
|
||||
/**
 * Builds an Elem node (element segment).
 *
 * @param {Object} table index of the table being initialized
 * @param {Array} offset instructions computing the start offset
 * @param {Array} funcs function indices placed into the table
 * @returns {Object} the new AST node
 * @throws {Error} when offset or funcs is not array-like
 */
export function elem(table, offset, funcs) {
  if (_typeof(offset) !== "object" || typeof offset.length === "undefined") {
    throw new Error('typeof offset === "object" && typeof offset.length !== "undefined"' + " error: " + (undefined || "unknown"));
  }

  if (_typeof(funcs) !== "object" || typeof funcs.length === "undefined") {
    throw new Error('typeof funcs === "object" && typeof funcs.length !== "undefined"' + " error: " + (undefined || "unknown"));
  }

  return {
    type: "Elem",
    table: table,
    offset: offset,
    funcs: funcs
  };
}
|
||||
/**
 * Builds an IndexInFuncSection node.
 *
 * @param {Object} index the function-section index
 * @returns {Object} the new AST node
 */
export function indexInFuncSection(index) {
  return {
    type: "IndexInFuncSection",
    index: index
  };
}
|
||||
/**
 * Builds a ValtypeLiteral node.
 *
 * @param {Object} name value-type name (e.g. "i32")
 * @returns {Object} the new AST node
 */
export function valtypeLiteral(name) {
  return {
    type: "ValtypeLiteral",
    name: name
  };
}
|
||||
/**
 * Builds a TypeInstruction node.
 *
 * @param {Object} id optional type identifier
 * @param {Object} functype the function type definition
 * @returns {Object} the new AST node
 */
export function typeInstruction(id, functype) {
  return {
    type: "TypeInstruction",
    id: id,
    functype: functype
  };
}
|
||||
/**
 * Builds a Start node (the module's start function).
 *
 * @param {Object} index index of the start function
 * @returns {Object} the new AST node
 */
export function start(index) {
  return {
    type: "Start",
    index: index
  };
}
|
||||
/**
 * Builds a GlobalType node.
 *
 * @param {Object} valtype the global's value type
 * @param {Object} mutability "const" or "var" mutability marker
 * @returns {Object} the new AST node
 */
export function globalType(valtype, mutability) {
  return {
    type: "GlobalType",
    valtype: valtype,
    mutability: mutability
  };
}
|
||||
/**
 * Builds a LeadingComment node.
 *
 * @param {string} value comment text
 * @returns {Object} the new AST node
 * @throws {Error} when value is not a string
 */
export function leadingComment(value) {
  if (typeof value !== "string") {
    throw new Error('typeof value === "string"' + " error: " + ("Argument value must be of type string, given: " + _typeof(value) || "unknown"));
  }

  return {
    type: "LeadingComment",
    value: value
  };
}
|
||||
/**
 * Builds a BlockComment node.
 *
 * @param {string} value comment text
 * @returns {Object} the new AST node
 * @throws {Error} when value is not a string
 */
export function blockComment(value) {
  if (typeof value !== "string") {
    throw new Error('typeof value === "string"' + " error: " + ("Argument value must be of type string, given: " + _typeof(value) || "unknown"));
  }

  return {
    type: "BlockComment",
    value: value
  };
}
|
||||
/**
 * Builds a Data node (data segment).
 *
 * @param {Object} memoryIndex index of the memory being initialized
 * @param {Object} offset instruction computing the start offset
 * @param {Object} init initializer bytes
 * @returns {Object} the new AST node
 */
export function data(memoryIndex, offset, init) {
  return {
    type: "Data",
    memoryIndex: memoryIndex,
    offset: offset,
    init: init
  };
}
|
||||
/**
 * Builds a Global node.
 *
 * @param {Object} globalType GlobalType node describing the global
 * @param {Array} init initializer instructions
 * @param {Object} name optional global name
 * @returns {Object} the new AST node
 * @throws {Error} when init is not array-like
 */
export function global(globalType, init, name) {
  if (_typeof(init) !== "object" || typeof init.length === "undefined") {
    throw new Error('typeof init === "object" && typeof init.length !== "undefined"' + " error: " + (undefined || "unknown"));
  }

  return {
    type: "Global",
    globalType: globalType,
    init: init,
    name: name
  };
}
|
||||
/**
 * Builds a Table node.
 *
 * @param {Object} elementType element type of the table (e.g. "anyfunc")
 * @param {Object} limits Limit node describing the table size
 * @param {Object} name optional table name
 * @param {Array} [elements] optional element list; attached only when
 *   non-empty
 * @returns {Object} the new AST node
 * @throws {Error} when limits is not a Limit node, or a non-null elements
 *   value is not array-like
 */
export function table(elementType, limits, name, elements) {
  if (!(limits.type === "Limit")) {
    throw new Error('limits.type === "Limit"' + " error: " + ("Argument limits must be of type Limit, given: " + limits.type || "unknown"));
  }

  if (elements !== null && elements !== undefined) {
    if (!(_typeof(elements) === "object" && typeof elements.length !== "undefined")) {
      throw new Error('typeof elements === "object" && typeof elements.length !== "undefined"' + " error: " + (undefined || "unknown"));
    }
  }

  var node = {
    type: "Table",
    elementType: elementType,
    limits: limits,
    name: name
  };

  // Fix: null is explicitly permitted by the guard above, but the original
  // check only excluded undefined and crashed reading null.length.
  if (elements != null && elements.length > 0) {
    node.elements = elements;
  }

  return node;
}
|
||||
/**
 * Builds a Memory node.
 *
 * @param {Object} limits Limit node describing the memory size
 * @param {Object} id optional memory identifier
 * @returns {Object} the new AST node
 */
export function memory(limits, id) {
  return {
    type: "Memory",
    limits: limits,
    id: id
  };
}
|
||||
/**
 * Builds a FuncImportDescr node (import descriptor for a function).
 *
 * @param {Object} id identifier of the imported function
 * @param {Object} signature Signature node of the imported function
 * @returns {Object} the new AST node
 */
export function funcImportDescr(id, signature) {
  return {
    type: "FuncImportDescr",
    id: id,
    signature: signature
  };
}
|
||||
/**
 * Builds a ModuleImport node.
 *
 * @param {string} module name of the module being imported from
 * @param {string} name name of the imported entity
 * @param {Object} descr import descriptor node
 * @returns {Object} the new AST node
 * @throws {Error} when module or name is not a string
 */
export function moduleImport(module, name, descr) {
  if (typeof module !== "string") {
    throw new Error('typeof module === "string"' + " error: " + ("Argument module must be of type string, given: " + _typeof(module) || "unknown"));
  }

  if (typeof name !== "string") {
    throw new Error('typeof name === "string"' + " error: " + ("Argument name must be of type string, given: " + _typeof(name) || "unknown"));
  }

  return {
    type: "ModuleImport",
    module: module,
    name: name,
    descr: descr
  };
}
|
||||
/**
 * Builds a ModuleExportDescr node (export descriptor).
 *
 * @param {Object} exportType kind of export (e.g. "Func", "Memory")
 * @param {Object} id identifier of the exported entity
 * @returns {Object} the new AST node
 */
export function moduleExportDescr(exportType, id) {
  return {
    type: "ModuleExportDescr",
    exportType: exportType,
    id: id
  };
}
|
||||
/**
 * Builds a ModuleExport node.
 *
 * @param {string} name exported name
 * @param {Object} descr ModuleExportDescr node
 * @returns {Object} the new AST node
 * @throws {Error} when name is not a string
 */
export function moduleExport(name, descr) {
  if (typeof name !== "string") {
    throw new Error('typeof name === "string"' + " error: " + ("Argument name must be of type string, given: " + _typeof(name) || "unknown"));
  }

  return {
    type: "ModuleExport",
    name: name,
    descr: descr
  };
}
|
||||
/**
 * Builds a Limit node.
 *
 * @param {number} min minimum size
 * @param {number} [max] optional maximum size; attached whenever it is not
 *   undefined (note: an explicit null is attached as-is)
 * @returns {Object} the new AST node
 * @throws {Error} when min (or a provided max) is not a number
 */
export function limit(min, max) {
  if (typeof min !== "number") {
    throw new Error('typeof min === "number"' + " error: " + ("Argument min must be of type number, given: " + _typeof(min) || "unknown"));
  }

  // max is optional: only validated when neither null nor undefined.
  if (max != null && typeof max !== "number") {
    throw new Error('typeof max === "number"' + " error: " + ("Argument max must be of type number, given: " + _typeof(max) || "unknown"));
  }

  var node = {
    type: "Limit",
    min: min
  };

  if (typeof max !== "undefined") {
    node.max = max;
  }

  return node;
}
|
||||
/**
 * Builds a Signature node (function type).
 *
 * @param {Array} params parameter types
 * @param {Array} results result types
 * @returns {Object} the new AST node
 * @throws {Error} when params or results is not array-like
 */
export function signature(params, results) {
  if (_typeof(params) !== "object" || typeof params.length === "undefined") {
    throw new Error('typeof params === "object" && typeof params.length !== "undefined"' + " error: " + (undefined || "unknown"));
  }

  if (_typeof(results) !== "object" || typeof results.length === "undefined") {
    throw new Error('typeof results === "object" && typeof results.length !== "undefined"' + " error: " + (undefined || "unknown"));
  }

  return {
    type: "Signature",
    params: params,
    results: results
  };
}
|
||||
/**
 * Builds a Program node, the root of a parsed AST.
 *
 * @param {Array} body top-level nodes of the program
 * @returns {Object} the new AST node
 * @throws {Error} when body is not array-like
 */
export function program(body) {
  if (_typeof(body) !== "object" || typeof body.length === "undefined") {
    throw new Error('typeof body === "object" && typeof body.length !== "undefined"' + " error: " + (undefined || "unknown"));
  }

  return {
    type: "Program",
    body: body
  };
}
|
||||
/**
 * Builds an Identifier node.
 *
 * @param {string} value the identifier text
 * @param {string} [raw] optional original source text; attached whenever
 *   it is not undefined (note: an explicit null is attached as-is)
 * @returns {Object} the new AST node
 * @throws {Error} when value (or a provided raw) is not a string
 */
export function identifier(value, raw) {
  if (typeof value !== "string") {
    throw new Error('typeof value === "string"' + " error: " + ("Argument value must be of type string, given: " + _typeof(value) || "unknown"));
  }

  // raw is optional: only validated when neither null nor undefined.
  if (raw != null && typeof raw !== "string") {
    throw new Error('typeof raw === "string"' + " error: " + ("Argument raw must be of type string, given: " + _typeof(raw) || "unknown"));
  }

  var node = {
    type: "Identifier",
    value: value
  };

  if (typeof raw !== "undefined") {
    node.raw = raw;
  }

  return node;
}
|
||||
/**
 * Builds a BlockInstruction node.
 *
 * @param {Object} label block label
 * @param {Array} instr body instructions of the block
 * @param {Object} result optional result valtype
 * @returns {Object} the new AST node
 * @throws {Error} when instr is not array-like
 */
export function blockInstruction(label, instr, result) {
  if (_typeof(instr) !== "object" || typeof instr.length === "undefined") {
    throw new Error('typeof instr === "object" && typeof instr.length !== "undefined"' + " error: " + (undefined || "unknown"));
  }

  return {
    type: "BlockInstruction",
    id: "block",
    label: label,
    instr: instr,
    result: result
  };
}
|
||||
/**
 * Builds a CallInstruction node.
 *
 * @param {Object} index index of the callee
 * @param {Array} [instrArgs] optional argument instructions; attached only
 *   when non-empty
 * @param {Object} [numeric] optional numeric form; attached when not
 *   undefined
 * @returns {Object} the new AST node
 * @throws {Error} when a non-null instrArgs value is not array-like
 */
export function callInstruction(index, instrArgs, numeric) {
  if (instrArgs !== null && instrArgs !== undefined) {
    if (!(_typeof(instrArgs) === "object" && typeof instrArgs.length !== "undefined")) {
      throw new Error('typeof instrArgs === "object" && typeof instrArgs.length !== "undefined"' + " error: " + (undefined || "unknown"));
    }
  }

  var node = {
    type: "CallInstruction",
    id: "call",
    index: index
  };

  // Fix: null is explicitly permitted by the guard above, but the original
  // check only excluded undefined and crashed reading null.length.
  if (instrArgs != null && instrArgs.length > 0) {
    node.instrArgs = instrArgs;
  }

  if (typeof numeric !== "undefined") {
    node.numeric = numeric;
  }

  return node;
}
|
||||
/**
 * Builds a CallIndirectInstruction node.
 *
 * @param {Object} signature signature of the indirect call target
 * @param {Array} [intrs] optional argument instructions; attached only when
 *   non-empty
 * @returns {Object} the new AST node
 * @throws {Error} when a non-null intrs value is not array-like
 */
export function callIndirectInstruction(signature, intrs) {
  if (intrs !== null && intrs !== undefined) {
    if (!(_typeof(intrs) === "object" && typeof intrs.length !== "undefined")) {
      throw new Error('typeof intrs === "object" && typeof intrs.length !== "undefined"' + " error: " + (undefined || "unknown"));
    }
  }

  var node = {
    type: "CallIndirectInstruction",
    id: "call_indirect",
    signature: signature
  };

  // Fix: null is explicitly permitted by the guard above, but the original
  // check only excluded undefined and crashed reading null.length.
  if (intrs != null && intrs.length > 0) {
    node.intrs = intrs;
  }

  return node;
}
|
||||
/**
 * Builds a ByteArray node.
 *
 * @param {Array} values the raw byte values
 * @returns {Object} the new AST node
 * @throws {Error} when values is not array-like
 */
export function byteArray(values) {
  if (_typeof(values) !== "object" || typeof values.length === "undefined") {
    throw new Error('typeof values === "object" && typeof values.length !== "undefined"' + " error: " + (undefined || "unknown"));
  }

  return {
    type: "ByteArray",
    values: values
  };
}
|
||||
/**
 * Builds a Func node.
 *
 * @param {Object} name function name
 * @param {Object} signature Signature node for the function
 * @param {Array} body body instructions
 * @param {boolean} [isExternal] attached only when strictly true
 * @param {Object} [metadata] attached whenever it is not undefined
 * @returns {Object} the new AST node
 * @throws {Error} when body is not array-like, or a provided isExternal is
 *   not a boolean
 */
export function func(name, signature, body, isExternal, metadata) {
  if (_typeof(body) !== "object" || typeof body.length === "undefined") {
    throw new Error('typeof body === "object" && typeof body.length !== "undefined"' + " error: " + (undefined || "unknown"));
  }

  // isExternal is optional: only validated when neither null nor undefined.
  if (isExternal != null && typeof isExternal !== "boolean") {
    throw new Error('typeof isExternal === "boolean"' + " error: " + ("Argument isExternal must be of type boolean, given: " + _typeof(isExternal) || "unknown"));
  }

  var node = {
    type: "Func",
    name: name,
    signature: signature,
    body: body
  };

  if (isExternal === true) {
    node.isExternal = true;
  }

  if (typeof metadata !== "undefined") {
    node.metadata = metadata;
  }

  return node;
}
|
||||
/**
 * Builds an InternalBrUnless node (interpreter intrinsic).
 *
 * @param {number} target branch target
 * @returns {Object} the new AST node
 * @throws {Error} when target is not a number
 */
export function internalBrUnless(target) {
  if (typeof target !== "number") {
    throw new Error('typeof target === "number"' + " error: " + ("Argument target must be of type number, given: " + _typeof(target) || "unknown"));
  }

  return {
    type: "InternalBrUnless",
    target: target
  };
}
|
||||
/**
 * Builds an InternalGoto node (interpreter intrinsic).
 *
 * @param {number} target jump target
 * @returns {Object} the new AST node
 * @throws {Error} when target is not a number
 */
export function internalGoto(target) {
  if (typeof target !== "number") {
    throw new Error('typeof target === "number"' + " error: " + ("Argument target must be of type number, given: " + _typeof(target) || "unknown"));
  }

  return {
    type: "InternalGoto",
    target: target
  };
}
|
||||
/**
 * Builds an InternalCallExtern node (interpreter intrinsic).
 *
 * @param {number} target index of the external function to call
 * @returns {Object} the new AST node
 * @throws {Error} when target is not a number
 */
export function internalCallExtern(target) {
  if (typeof target !== "number") {
    throw new Error('typeof target === "number"' + " error: " + ("Argument target must be of type number, given: " + _typeof(target) || "unknown"));
  }

  return {
    type: "InternalCallExtern",
    target: target
  };
}
|
||||
/**
 * Builds an InternalEndAndReturn node (interpreter intrinsic, no payload).
 *
 * @returns {Object} the new AST node
 */
export function internalEndAndReturn() {
  return {
    type: "InternalEndAndReturn"
  };
}
|
||||
// Type predicates, one per concrete node type. Each is produced by
// isTypeOf(type) (defined earlier in this file) and returns true when
// node.type matches exactly.
export var isModule = isTypeOf("Module");
export var isModuleMetadata = isTypeOf("ModuleMetadata");
export var isModuleNameMetadata = isTypeOf("ModuleNameMetadata");
export var isFunctionNameMetadata = isTypeOf("FunctionNameMetadata");
export var isLocalNameMetadata = isTypeOf("LocalNameMetadata");
export var isBinaryModule = isTypeOf("BinaryModule");
export var isQuoteModule = isTypeOf("QuoteModule");
export var isSectionMetadata = isTypeOf("SectionMetadata");
export var isProducersSectionMetadata = isTypeOf("ProducersSectionMetadata");
export var isProducerMetadata = isTypeOf("ProducerMetadata");
export var isProducerMetadataVersionedName = isTypeOf("ProducerMetadataVersionedName");
export var isLoopInstruction = isTypeOf("LoopInstruction");
export var isInstr = isTypeOf("Instr");
export var isIfInstruction = isTypeOf("IfInstruction");
export var isStringLiteral = isTypeOf("StringLiteral");
export var isNumberLiteral = isTypeOf("NumberLiteral");
export var isLongNumberLiteral = isTypeOf("LongNumberLiteral");
export var isFloatLiteral = isTypeOf("FloatLiteral");
export var isElem = isTypeOf("Elem");
export var isIndexInFuncSection = isTypeOf("IndexInFuncSection");
export var isValtypeLiteral = isTypeOf("ValtypeLiteral");
export var isTypeInstruction = isTypeOf("TypeInstruction");
export var isStart = isTypeOf("Start");
export var isGlobalType = isTypeOf("GlobalType");
export var isLeadingComment = isTypeOf("LeadingComment");
export var isBlockComment = isTypeOf("BlockComment");
export var isData = isTypeOf("Data");
export var isGlobal = isTypeOf("Global");
export var isTable = isTypeOf("Table");
export var isMemory = isTypeOf("Memory");
export var isFuncImportDescr = isTypeOf("FuncImportDescr");
export var isModuleImport = isTypeOf("ModuleImport");
export var isModuleExportDescr = isTypeOf("ModuleExportDescr");
export var isModuleExport = isTypeOf("ModuleExport");
export var isLimit = isTypeOf("Limit");
export var isSignature = isTypeOf("Signature");
export var isProgram = isTypeOf("Program");
export var isIdentifier = isTypeOf("Identifier");
export var isBlockInstruction = isTypeOf("BlockInstruction");
export var isCallInstruction = isTypeOf("CallInstruction");
export var isCallIndirectInstruction = isTypeOf("CallIndirectInstruction");
export var isByteArray = isTypeOf("ByteArray");
export var isFunc = isTypeOf("Func");
export var isInternalBrUnless = isTypeOf("InternalBrUnless");
export var isInternalGoto = isTypeOf("InternalGoto");
export var isInternalCallExtern = isTypeOf("InternalCallExtern");
export var isInternalEndAndReturn = isTypeOf("InternalEndAndReturn");

// Union predicates: true when the node belongs to the named union type.
// The membership here mirrors unionTypesMap declared later in this file.

// isNode matches every concrete node type above.
export var isNode = function isNode(node) {
  return isModule(node) || isModuleMetadata(node) || isModuleNameMetadata(node) || isFunctionNameMetadata(node) || isLocalNameMetadata(node) || isBinaryModule(node) || isQuoteModule(node) || isSectionMetadata(node) || isProducersSectionMetadata(node) || isProducerMetadata(node) || isProducerMetadataVersionedName(node) || isLoopInstruction(node) || isInstr(node) || isIfInstruction(node) || isStringLiteral(node) || isNumberLiteral(node) || isLongNumberLiteral(node) || isFloatLiteral(node) || isElem(node) || isIndexInFuncSection(node) || isValtypeLiteral(node) || isTypeInstruction(node) || isStart(node) || isGlobalType(node) || isLeadingComment(node) || isBlockComment(node) || isData(node) || isGlobal(node) || isTable(node) || isMemory(node) || isFuncImportDescr(node) || isModuleImport(node) || isModuleExportDescr(node) || isModuleExport(node) || isLimit(node) || isSignature(node) || isProgram(node) || isIdentifier(node) || isBlockInstruction(node) || isCallInstruction(node) || isCallIndirectInstruction(node) || isByteArray(node) || isFunc(node) || isInternalBrUnless(node) || isInternalGoto(node) || isInternalCallExtern(node) || isInternalEndAndReturn(node);
};

// Block: constructs that introduce a labelled instruction sequence.
export var isBlock = function isBlock(node) {
  return isLoopInstruction(node) || isBlockInstruction(node) || isFunc(node);
};

// Instruction: every instruction-like node.
export var isInstruction = function isInstruction(node) {
  return isLoopInstruction(node) || isInstr(node) || isIfInstruction(node) || isTypeInstruction(node) || isBlockInstruction(node) || isCallInstruction(node) || isCallIndirectInstruction(node);
};

// Expression: nodes that can appear in expression position.
export var isExpression = function isExpression(node) {
  return isInstr(node) || isStringLiteral(node) || isNumberLiteral(node) || isLongNumberLiteral(node) || isFloatLiteral(node) || isValtypeLiteral(node) || isIdentifier(node);
};

// NumericLiteral: any of the three numeric literal node kinds.
export var isNumericLiteral = function isNumericLiteral(node) {
  return isNumberLiteral(node) || isLongNumberLiteral(node) || isFloatLiteral(node);
};

// ImportDescr: nodes usable as an import descriptor.
export var isImportDescr = function isImportDescr(node) {
  return isGlobalType(node) || isTable(node) || isMemory(node) || isFuncImportDescr(node);
};

// Intrinsic: interpreter-internal pseudo-instructions.
export var isIntrinsic = function isIntrinsic(node) {
  return isInternalBrUnless(node) || isInternalGoto(node) || isInternalCallExtern(node) || isInternalEndAndReturn(node);
};
|
||||
// Assertion helpers, one per concrete node type. Each is produced by
// assertTypeOf(type) (defined earlier in this file) and throws when the
// given node's type does not match.
export var assertModule = assertTypeOf("Module");
export var assertModuleMetadata = assertTypeOf("ModuleMetadata");
export var assertModuleNameMetadata = assertTypeOf("ModuleNameMetadata");
export var assertFunctionNameMetadata = assertTypeOf("FunctionNameMetadata");
export var assertLocalNameMetadata = assertTypeOf("LocalNameMetadata");
export var assertBinaryModule = assertTypeOf("BinaryModule");
export var assertQuoteModule = assertTypeOf("QuoteModule");
export var assertSectionMetadata = assertTypeOf("SectionMetadata");
export var assertProducersSectionMetadata = assertTypeOf("ProducersSectionMetadata");
export var assertProducerMetadata = assertTypeOf("ProducerMetadata");
export var assertProducerMetadataVersionedName = assertTypeOf("ProducerMetadataVersionedName");
export var assertLoopInstruction = assertTypeOf("LoopInstruction");
export var assertInstr = assertTypeOf("Instr");
export var assertIfInstruction = assertTypeOf("IfInstruction");
export var assertStringLiteral = assertTypeOf("StringLiteral");
export var assertNumberLiteral = assertTypeOf("NumberLiteral");
export var assertLongNumberLiteral = assertTypeOf("LongNumberLiteral");
export var assertFloatLiteral = assertTypeOf("FloatLiteral");
export var assertElem = assertTypeOf("Elem");
export var assertIndexInFuncSection = assertTypeOf("IndexInFuncSection");
export var assertValtypeLiteral = assertTypeOf("ValtypeLiteral");
export var assertTypeInstruction = assertTypeOf("TypeInstruction");
export var assertStart = assertTypeOf("Start");
export var assertGlobalType = assertTypeOf("GlobalType");
export var assertLeadingComment = assertTypeOf("LeadingComment");
export var assertBlockComment = assertTypeOf("BlockComment");
export var assertData = assertTypeOf("Data");
export var assertGlobal = assertTypeOf("Global");
export var assertTable = assertTypeOf("Table");
export var assertMemory = assertTypeOf("Memory");
export var assertFuncImportDescr = assertTypeOf("FuncImportDescr");
export var assertModuleImport = assertTypeOf("ModuleImport");
export var assertModuleExportDescr = assertTypeOf("ModuleExportDescr");
export var assertModuleExport = assertTypeOf("ModuleExport");
export var assertLimit = assertTypeOf("Limit");
export var assertSignature = assertTypeOf("Signature");
export var assertProgram = assertTypeOf("Program");
export var assertIdentifier = assertTypeOf("Identifier");
export var assertBlockInstruction = assertTypeOf("BlockInstruction");
export var assertCallInstruction = assertTypeOf("CallInstruction");
export var assertCallIndirectInstruction = assertTypeOf("CallIndirectInstruction");
export var assertByteArray = assertTypeOf("ByteArray");
export var assertFunc = assertTypeOf("Func");
export var assertInternalBrUnless = assertTypeOf("InternalBrUnless");
export var assertInternalGoto = assertTypeOf("InternalGoto");
export var assertInternalCallExtern = assertTypeOf("InternalCallExtern");
export var assertInternalEndAndReturn = assertTypeOf("InternalEndAndReturn");
|
||||
// Maps each concrete node type to the union types it belongs to. Used by
// traversal/validation utilities to answer "is this node an Instruction,
// an Expression, ..." without hard-coding the membership in each consumer.
export var unionTypesMap = {
  Module: ["Node"],
  ModuleMetadata: ["Node"],
  ModuleNameMetadata: ["Node"],
  FunctionNameMetadata: ["Node"],
  LocalNameMetadata: ["Node"],
  BinaryModule: ["Node"],
  QuoteModule: ["Node"],
  SectionMetadata: ["Node"],
  ProducersSectionMetadata: ["Node"],
  ProducerMetadata: ["Node"],
  ProducerMetadataVersionedName: ["Node"],
  LoopInstruction: ["Node", "Block", "Instruction"],
  Instr: ["Node", "Expression", "Instruction"],
  IfInstruction: ["Node", "Instruction"],
  StringLiteral: ["Node", "Expression"],
  NumberLiteral: ["Node", "NumericLiteral", "Expression"],
  LongNumberLiteral: ["Node", "NumericLiteral", "Expression"],
  FloatLiteral: ["Node", "NumericLiteral", "Expression"],
  Elem: ["Node"],
  IndexInFuncSection: ["Node"],
  ValtypeLiteral: ["Node", "Expression"],
  TypeInstruction: ["Node", "Instruction"],
  Start: ["Node"],
  GlobalType: ["Node", "ImportDescr"],
  LeadingComment: ["Node"],
  BlockComment: ["Node"],
  Data: ["Node"],
  Global: ["Node"],
  Table: ["Node", "ImportDescr"],
  Memory: ["Node", "ImportDescr"],
  FuncImportDescr: ["Node", "ImportDescr"],
  ModuleImport: ["Node"],
  ModuleExportDescr: ["Node"],
  ModuleExport: ["Node"],
  Limit: ["Node"],
  Signature: ["Node"],
  Program: ["Node"],
  Identifier: ["Node", "Expression"],
  BlockInstruction: ["Node", "Block", "Instruction"],
  CallInstruction: ["Node", "Instruction"],
  CallIndirectInstruction: ["Node", "Instruction"],
  ByteArray: ["Node"],
  Func: ["Node", "Block"],
  InternalBrUnless: ["Node", "Intrinsic"],
  InternalGoto: ["Node", "Intrinsic"],
  InternalCallExtern: ["Node", "Intrinsic"],
  InternalEndAndReturn: ["Node", "Intrinsic"]
};

// Flat list of every concrete node type followed by every union type name.
export var nodeAndUnionTypes = ["Module", "ModuleMetadata", "ModuleNameMetadata", "FunctionNameMetadata", "LocalNameMetadata", "BinaryModule", "QuoteModule", "SectionMetadata", "ProducersSectionMetadata", "ProducerMetadata", "ProducerMetadataVersionedName", "LoopInstruction", "Instr", "IfInstruction", "StringLiteral", "NumberLiteral", "LongNumberLiteral", "FloatLiteral", "Elem", "IndexInFuncSection", "ValtypeLiteral", "TypeInstruction", "Start", "GlobalType", "LeadingComment", "BlockComment", "Data", "Global", "Table", "Memory", "FuncImportDescr", "ModuleImport", "ModuleExportDescr", "ModuleExport", "Limit", "Signature", "Program", "Identifier", "BlockInstruction", "CallInstruction", "CallIndirectInstruction", "ByteArray", "Func", "InternalBrUnless", "InternalGoto", "InternalCallExtern", "InternalEndAndReturn", "Node", "Block", "Instruction", "Expression", "NumericLiteral", "ImportDescr", "Intrinsic"];
|
||||
199
node_modules/@webassemblyjs/ast/esm/signatures.js
generated
vendored
Normal file
199
node_modules/@webassemblyjs/ast/esm/signatures.js
generated
vendored
Normal file
@@ -0,0 +1,199 @@
|
||||
// Builds an [input, output] signature tuple: the value types an instruction
// pops from the stack and the types it pushes.
function sign(input, output) {
  return [input, output];
}
|
||||
|
||||
// Shorthand names for the WebAssembly value types used in the signature
// tables below.
var u32 = "u32";
var i32 = "i32";
var i64 = "i64";
var f32 = "f32";
var f64 = "f64";
|
||||
|
||||
/**
 * Wraps a type in a single-element list tagged as a vector (variable
 * arity), distinguishing it from a fixed-arity type list.
 *
 * @param {string} t element type
 * @returns {Array} [t] with an own `.vector = true` property
 */
var vector = function vector(t) {
  var taggedList = [t]; // $FlowIgnore: extra own property on an array

  taggedList.vector = true;
  return taggedList;
};
|
||||
|
||||
// Stack signatures of the control instructions (inputs consumed, outputs
// produced). Structured instructions (block/loop/if) have no fixed
// signature and are handled elsewhere.
var controlInstructions = {
  unreachable: sign([], []),
  nop: sign([], []),
  // block ?
  // loop ?
  // if ?
  // if else ?
  br: sign([u32], []),
  br_if: sign([u32], []),
  br_table: sign(vector(u32), []),
  return: sign([], []),
  call: sign([u32], []),
  call_indirect: sign([u32], [])
};
|
||||
// Stack signatures of the parametric instructions.
var parametricInstructions = {
  drop: sign([], []),
  select: sign([], [])
};
|
||||
// Stack signatures of the variable (local/global access) instructions;
// each takes a u32 index immediate.
var variableInstructions = {
  get_local: sign([u32], []),
  set_local: sign([u32], []),
  tee_local: sign([u32], []),
  get_global: sign([u32], []),
  set_global: sign([u32], [])
};
|
||||
// Stack signatures of the memory instructions. The two u32 inputs are the
// align/offset immediates.
// NOTE(review): output lists look inconsistent — "i32.load*" declare their
// result type while "i64.load"/"f32.load"/"f64.load" declare none; this
// matches the code as found, but verify against upstream before relying on
// the outputs.
var memoryInstructions = {
  "i32.load": sign([u32, u32], [i32]),
  "i64.load": sign([u32, u32], []),
  "f32.load": sign([u32, u32], []),
  "f64.load": sign([u32, u32], []),
  "i32.load8_s": sign([u32, u32], [i32]),
  "i32.load8_u": sign([u32, u32], [i32]),
  "i32.load16_s": sign([u32, u32], [i32]),
  "i32.load16_u": sign([u32, u32], [i32]),
  "i64.load8_s": sign([u32, u32], [i64]),
  "i64.load8_u": sign([u32, u32], [i64]),
  "i64.load16_s": sign([u32, u32], [i64]),
  "i64.load16_u": sign([u32, u32], [i64]),
  "i64.load32_s": sign([u32, u32], [i64]),
  "i64.load32_u": sign([u32, u32], [i64]),
  "i32.store": sign([u32, u32], []),
  "i64.store": sign([u32, u32], []),
  "f32.store": sign([u32, u32], []),
  "f64.store": sign([u32, u32], []),
  "i32.store8": sign([u32, u32], []),
  "i32.store16": sign([u32, u32], []),
  "i64.store8": sign([u32, u32], []),
  "i64.store16": sign([u32, u32], []),
  "i64.store32": sign([u32, u32], []),
  current_memory: sign([], []),
  grow_memory: sign([], [])
};
|
||||
var numericInstructions = {
|
||||
"i32.const": sign([i32], [i32]),
|
||||
"i64.const": sign([i64], [i64]),
|
||||
"f32.const": sign([f32], [f32]),
|
||||
"f64.const": sign([f64], [f64]),
|
||||
"i32.eqz": sign([i32], [i32]),
|
||||
"i32.eq": sign([i32, i32], [i32]),
|
||||
"i32.ne": sign([i32, i32], [i32]),
|
||||
"i32.lt_s": sign([i32, i32], [i32]),
|
||||
"i32.lt_u": sign([i32, i32], [i32]),
|
||||
"i32.gt_s": sign([i32, i32], [i32]),
|
||||
"i32.gt_u": sign([i32, i32], [i32]),
|
||||
"i32.le_s": sign([i32, i32], [i32]),
|
||||
"i32.le_u": sign([i32, i32], [i32]),
|
||||
"i32.ge_s": sign([i32, i32], [i32]),
|
||||
"i32.ge_u": sign([i32, i32], [i32]),
|
||||
"i64.eqz": sign([i64], [i64]),
|
||||
"i64.eq": sign([i64, i64], [i32]),
|
||||
"i64.ne": sign([i64, i64], [i32]),
|
||||
"i64.lt_s": sign([i64, i64], [i32]),
|
||||
"i64.lt_u": sign([i64, i64], [i32]),
|
||||
"i64.gt_s": sign([i64, i64], [i32]),
|
||||
"i64.gt_u": sign([i64, i64], [i32]),
|
||||
"i64.le_s": sign([i64, i64], [i32]),
|
||||
"i64.le_u": sign([i64, i64], [i32]),
|
||||
"i64.ge_s": sign([i64, i64], [i32]),
|
||||
"i64.ge_u": sign([i64, i64], [i32]),
|
||||
"f32.eq": sign([f32, f32], [i32]),
|
||||
"f32.ne": sign([f32, f32], [i32]),
|
||||
"f32.lt": sign([f32, f32], [i32]),
|
||||
"f32.gt": sign([f32, f32], [i32]),
|
||||
"f32.le": sign([f32, f32], [i32]),
|
||||
"f32.ge": sign([f32, f32], [i32]),
|
||||
"f64.eq": sign([f64, f64], [i32]),
|
||||
"f64.ne": sign([f64, f64], [i32]),
|
||||
"f64.lt": sign([f64, f64], [i32]),
|
||||
"f64.gt": sign([f64, f64], [i32]),
|
||||
"f64.le": sign([f64, f64], [i32]),
|
||||
"f64.ge": sign([f64, f64], [i32]),
|
||||
"i32.clz": sign([i32], [i32]),
|
||||
"i32.ctz": sign([i32], [i32]),
|
||||
"i32.popcnt": sign([i32], [i32]),
|
||||
"i32.add": sign([i32, i32], [i32]),
|
||||
"i32.sub": sign([i32, i32], [i32]),
|
||||
"i32.mul": sign([i32, i32], [i32]),
|
||||
"i32.div_s": sign([i32, i32], [i32]),
|
||||
"i32.div_u": sign([i32, i32], [i32]),
|
||||
"i32.rem_s": sign([i32, i32], [i32]),
|
||||
"i32.rem_u": sign([i32, i32], [i32]),
|
||||
"i32.and": sign([i32, i32], [i32]),
|
||||
"i32.or": sign([i32, i32], [i32]),
|
||||
"i32.xor": sign([i32, i32], [i32]),
|
||||
"i32.shl": sign([i32, i32], [i32]),
|
||||
"i32.shr_s": sign([i32, i32], [i32]),
|
||||
"i32.shr_u": sign([i32, i32], [i32]),
|
||||
"i32.rotl": sign([i32, i32], [i32]),
|
||||
"i32.rotr": sign([i32, i32], [i32]),
|
||||
"i64.clz": sign([i64], [i64]),
|
||||
"i64.ctz": sign([i64], [i64]),
|
||||
"i64.popcnt": sign([i64], [i64]),
|
||||
"i64.add": sign([i64, i64], [i64]),
|
||||
"i64.sub": sign([i64, i64], [i64]),
|
||||
"i64.mul": sign([i64, i64], [i64]),
|
||||
"i64.div_s": sign([i64, i64], [i64]),
|
||||
"i64.div_u": sign([i64, i64], [i64]),
|
||||
"i64.rem_s": sign([i64, i64], [i64]),
|
||||
"i64.rem_u": sign([i64, i64], [i64]),
|
||||
"i64.and": sign([i64, i64], [i64]),
|
||||
"i64.or": sign([i64, i64], [i64]),
|
||||
"i64.xor": sign([i64, i64], [i64]),
|
||||
"i64.shl": sign([i64, i64], [i64]),
|
||||
"i64.shr_s": sign([i64, i64], [i64]),
|
||||
"i64.shr_u": sign([i64, i64], [i64]),
|
||||
"i64.rotl": sign([i64, i64], [i64]),
|
||||
"i64.rotr": sign([i64, i64], [i64]),
|
||||
"f32.abs": sign([f32], [f32]),
|
||||
"f32.neg": sign([f32], [f32]),
|
||||
"f32.ceil": sign([f32], [f32]),
|
||||
"f32.floor": sign([f32], [f32]),
|
||||
"f32.trunc": sign([f32], [f32]),
|
||||
"f32.nearest": sign([f32], [f32]),
|
||||
"f32.sqrt": sign([f32], [f32]),
|
||||
"f32.add": sign([f32, f32], [f32]),
|
||||
"f32.sub": sign([f32, f32], [f32]),
|
||||
"f32.mul": sign([f32, f32], [f32]),
|
||||
"f32.div": sign([f32, f32], [f32]),
|
||||
"f32.min": sign([f32, f32], [f32]),
|
||||
"f32.max": sign([f32, f32], [f32]),
|
||||
"f32.copysign": sign([f32, f32], [f32]),
|
||||
"f64.abs": sign([f64], [f64]),
|
||||
"f64.neg": sign([f64], [f64]),
|
||||
"f64.ceil": sign([f64], [f64]),
|
||||
"f64.floor": sign([f64], [f64]),
|
||||
"f64.trunc": sign([f64], [f64]),
|
||||
"f64.nearest": sign([f64], [f64]),
|
||||
"f64.sqrt": sign([f64], [f64]),
|
||||
"f64.add": sign([f64, f64], [f64]),
|
||||
"f64.sub": sign([f64, f64], [f64]),
|
||||
"f64.mul": sign([f64, f64], [f64]),
|
||||
"f64.div": sign([f64, f64], [f64]),
|
||||
"f64.min": sign([f64, f64], [f64]),
|
||||
"f64.max": sign([f64, f64], [f64]),
|
||||
"f64.copysign": sign([f64, f64], [f64]),
|
||||
"i32.wrap/i64": sign([i64], [i32]),
|
||||
"i32.trunc_s/f32": sign([f32], [i32]),
|
||||
"i32.trunc_u/f32": sign([f32], [i32]),
|
||||
"i32.trunc_s/f64": sign([f32], [i32]),
|
||||
"i32.trunc_u/f64": sign([f64], [i32]),
|
||||
"i64.extend_s/i32": sign([i32], [i64]),
|
||||
"i64.extend_u/i32": sign([i32], [i64]),
|
||||
"i64.trunc_s/f32": sign([f32], [i64]),
|
||||
"i64.trunc_u/f32": sign([f32], [i64]),
|
||||
"i64.trunc_s/f64": sign([f64], [i64]),
|
||||
"i64.trunc_u/f64": sign([f64], [i64]),
|
||||
"f32.convert_s/i32": sign([i32], [f32]),
|
||||
"f32.convert_u/i32": sign([i32], [f32]),
|
||||
"f32.convert_s/i64": sign([i64], [f32]),
|
||||
"f32.convert_u/i64": sign([i64], [f32]),
|
||||
"f32.demote/f64": sign([f64], [f32]),
|
||||
"f64.convert_s/i32": sign([i32], [f64]),
|
||||
"f64.convert_u/i32": sign([i32], [f64]),
|
||||
"f64.convert_s/i64": sign([i64], [f64]),
|
||||
"f64.convert_u/i64": sign([i64], [f64]),
|
||||
"f64.promote/f32": sign([f32], [f64]),
|
||||
"i32.reinterpret/f32": sign([f32], [i32]),
|
||||
"i64.reinterpret/f64": sign([f64], [i64]),
|
||||
"f32.reinterpret/i32": sign([i32], [f32]),
|
||||
"f64.reinterpret/i64": sign([i64], [f64])
|
||||
};
|
||||
export var signatures = Object.assign({}, controlInstructions, parametricInstructions, variableInstructions, memoryInstructions, numericInstructions);
|
||||
76
node_modules/@webassemblyjs/ast/esm/transform/denormalize-type-references/index.js
generated
vendored
Normal file
76
node_modules/@webassemblyjs/ast/esm/transform/denormalize-type-references/index.js
generated
vendored
Normal file
@@ -0,0 +1,76 @@
|
||||
var t = require("../../index"); // func and call_indirect instructions can either define a signature inline, or
|
||||
// reference a signature, e.g.
|
||||
//
|
||||
// ;; inline signature
|
||||
// (func (result i64)
|
||||
// (i64.const 2)
|
||||
// )
|
||||
// ;; signature reference
|
||||
// (type (func (result i64)))
|
||||
// (func (type 0)
|
||||
// (i64.const 2))
|
||||
// )
|
||||
//
|
||||
// this AST transform denormalises the type references, making all signatures within the module
|
||||
// inline.
|
||||
|
||||
|
||||
export function transform(ast) {
|
||||
var typeInstructions = [];
|
||||
t.traverse(ast, {
|
||||
TypeInstruction: function TypeInstruction(_ref) {
|
||||
var node = _ref.node;
|
||||
typeInstructions.push(node);
|
||||
}
|
||||
});
|
||||
|
||||
if (!typeInstructions.length) {
|
||||
return;
|
||||
}
|
||||
|
||||
function denormalizeSignature(signature) {
|
||||
// signature referenced by identifier
|
||||
if (signature.type === "Identifier") {
|
||||
var identifier = signature;
|
||||
var typeInstruction = typeInstructions.find(function (t) {
|
||||
return t.id.type === identifier.type && t.id.value === identifier.value;
|
||||
});
|
||||
|
||||
if (!typeInstruction) {
|
||||
throw new Error("A type instruction reference was not found ".concat(JSON.stringify(signature)));
|
||||
}
|
||||
|
||||
return typeInstruction.functype;
|
||||
} // signature referenced by index
|
||||
|
||||
|
||||
if (signature.type === "NumberLiteral") {
|
||||
var signatureRef = signature;
|
||||
var _typeInstruction = typeInstructions[signatureRef.value];
|
||||
return _typeInstruction.functype;
|
||||
}
|
||||
|
||||
return signature;
|
||||
}
|
||||
|
||||
t.traverse(ast, {
|
||||
Func: function (_Func) {
|
||||
function Func(_x) {
|
||||
return _Func.apply(this, arguments);
|
||||
}
|
||||
|
||||
Func.toString = function () {
|
||||
return _Func.toString();
|
||||
};
|
||||
|
||||
return Func;
|
||||
}(function (_ref2) {
|
||||
var node = _ref2.node;
|
||||
node.signature = denormalizeSignature(node.signature);
|
||||
}),
|
||||
CallIndirectInstruction: function CallIndirectInstruction(_ref3) {
|
||||
var node = _ref3.node;
|
||||
node.signature = denormalizeSignature(node.signature);
|
||||
}
|
||||
});
|
||||
}
|
||||
216
node_modules/@webassemblyjs/ast/esm/transform/wast-identifier-to-index/index.js
generated
vendored
Normal file
216
node_modules/@webassemblyjs/ast/esm/transform/wast-identifier-to-index/index.js
generated
vendored
Normal file
@@ -0,0 +1,216 @@
|
||||
function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
|
||||
|
||||
function _sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
|
||||
|
||||
function _slicedToArray(arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return _sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }
|
||||
|
||||
import { isBlock, isFunc, isIdentifier, numberLiteralFromRaw, traverse } from "../../index";
|
||||
import { moduleContextFromModuleAST } from "@webassemblyjs/helper-module-context"; // FIXME(sven): do the same with all block instructions, must be more generic here
|
||||
|
||||
function newUnexpectedFunction(i) {
|
||||
return new Error("unknown function at offset: " + i);
|
||||
}
|
||||
|
||||
export function transform(ast) {
|
||||
var module;
|
||||
traverse(ast, {
|
||||
Module: function (_Module) {
|
||||
function Module(_x) {
|
||||
return _Module.apply(this, arguments);
|
||||
}
|
||||
|
||||
Module.toString = function () {
|
||||
return _Module.toString();
|
||||
};
|
||||
|
||||
return Module;
|
||||
}(function (path) {
|
||||
module = path.node;
|
||||
})
|
||||
});
|
||||
var moduleContext = moduleContextFromModuleAST(module); // Transform the actual instruction in function bodies
|
||||
|
||||
traverse(ast, {
|
||||
Func: function (_Func) {
|
||||
function Func(_x2) {
|
||||
return _Func.apply(this, arguments);
|
||||
}
|
||||
|
||||
Func.toString = function () {
|
||||
return _Func.toString();
|
||||
};
|
||||
|
||||
return Func;
|
||||
}(function (path) {
|
||||
transformFuncPath(path, moduleContext);
|
||||
}),
|
||||
Start: function (_Start) {
|
||||
function Start(_x3) {
|
||||
return _Start.apply(this, arguments);
|
||||
}
|
||||
|
||||
Start.toString = function () {
|
||||
return _Start.toString();
|
||||
};
|
||||
|
||||
return Start;
|
||||
}(function (path) {
|
||||
var index = path.node.index;
|
||||
|
||||
if (isIdentifier(index) === true) {
|
||||
var offsetInModule = moduleContext.getFunctionOffsetByIdentifier(index.value);
|
||||
|
||||
if (typeof offsetInModule === "undefined") {
|
||||
throw newUnexpectedFunction(index.value);
|
||||
} // Replace the index Identifier
|
||||
// $FlowIgnore: reference?
|
||||
|
||||
|
||||
path.node.index = numberLiteralFromRaw(offsetInModule);
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
function transformFuncPath(funcPath, moduleContext) {
|
||||
var funcNode = funcPath.node;
|
||||
var signature = funcNode.signature;
|
||||
|
||||
if (signature.type !== "Signature") {
|
||||
throw new Error("Function signatures must be denormalised before execution");
|
||||
}
|
||||
|
||||
var params = signature.params; // Add func locals in the context
|
||||
|
||||
params.forEach(function (p) {
|
||||
return moduleContext.addLocal(p.valtype);
|
||||
});
|
||||
traverse(funcNode, {
|
||||
Instr: function (_Instr) {
|
||||
function Instr(_x4) {
|
||||
return _Instr.apply(this, arguments);
|
||||
}
|
||||
|
||||
Instr.toString = function () {
|
||||
return _Instr.toString();
|
||||
};
|
||||
|
||||
return Instr;
|
||||
}(function (instrPath) {
|
||||
var instrNode = instrPath.node;
|
||||
/**
|
||||
* Local access
|
||||
*/
|
||||
|
||||
if (instrNode.id === "get_local" || instrNode.id === "set_local" || instrNode.id === "tee_local") {
|
||||
var _instrNode$args = _slicedToArray(instrNode.args, 1),
|
||||
firstArg = _instrNode$args[0];
|
||||
|
||||
if (firstArg.type === "Identifier") {
|
||||
var offsetInParams = params.findIndex(function (_ref) {
|
||||
var id = _ref.id;
|
||||
return id === firstArg.value;
|
||||
});
|
||||
|
||||
if (offsetInParams === -1) {
|
||||
throw new Error("".concat(firstArg.value, " not found in ").concat(instrNode.id, ": not declared in func params"));
|
||||
} // Replace the Identifer node by our new NumberLiteral node
|
||||
|
||||
|
||||
instrNode.args[0] = numberLiteralFromRaw(offsetInParams);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Global access
|
||||
*/
|
||||
|
||||
|
||||
if (instrNode.id === "get_global" || instrNode.id === "set_global") {
|
||||
var _instrNode$args2 = _slicedToArray(instrNode.args, 1),
|
||||
_firstArg = _instrNode$args2[0];
|
||||
|
||||
if (isIdentifier(_firstArg) === true) {
|
||||
var globalOffset = moduleContext.getGlobalOffsetByIdentifier( // $FlowIgnore: reference?
|
||||
_firstArg.value);
|
||||
|
||||
if (typeof globalOffset === "undefined") {
|
||||
// $FlowIgnore: reference?
|
||||
throw new Error("global ".concat(_firstArg.value, " not found in module"));
|
||||
} // Replace the Identifer node by our new NumberLiteral node
|
||||
|
||||
|
||||
instrNode.args[0] = numberLiteralFromRaw(globalOffset);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Labels lookup
|
||||
*/
|
||||
|
||||
|
||||
if (instrNode.id === "br") {
|
||||
var _instrNode$args3 = _slicedToArray(instrNode.args, 1),
|
||||
_firstArg2 = _instrNode$args3[0];
|
||||
|
||||
if (isIdentifier(_firstArg2) === true) {
|
||||
// if the labels is not found it is going to be replaced with -1
|
||||
// which is invalid.
|
||||
var relativeBlockCount = -1; // $FlowIgnore: reference?
|
||||
|
||||
instrPath.findParent(function (_ref2) {
|
||||
var node = _ref2.node;
|
||||
|
||||
if (isBlock(node)) {
|
||||
relativeBlockCount++; // $FlowIgnore: reference?
|
||||
|
||||
var name = node.label || node.name;
|
||||
|
||||
if (_typeof(name) === "object") {
|
||||
// $FlowIgnore: isIdentifier ensures that
|
||||
if (name.value === _firstArg2.value) {
|
||||
// Found it
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (isFunc(node)) {
|
||||
return false;
|
||||
}
|
||||
}); // Replace the Identifer node by our new NumberLiteral node
|
||||
|
||||
instrNode.args[0] = numberLiteralFromRaw(relativeBlockCount);
|
||||
}
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Func lookup
|
||||
*/
|
||||
CallInstruction: function (_CallInstruction) {
|
||||
function CallInstruction(_x5) {
|
||||
return _CallInstruction.apply(this, arguments);
|
||||
}
|
||||
|
||||
CallInstruction.toString = function () {
|
||||
return _CallInstruction.toString();
|
||||
};
|
||||
|
||||
return CallInstruction;
|
||||
}(function (_ref3) {
|
||||
var node = _ref3.node;
|
||||
var index = node.index;
|
||||
|
||||
if (isIdentifier(index) === true) {
|
||||
var offsetInModule = moduleContext.getFunctionOffsetByIdentifier(index.value);
|
||||
|
||||
if (typeof offsetInModule === "undefined") {
|
||||
throw newUnexpectedFunction(index.value);
|
||||
} // Replace the index Identifier
|
||||
// $FlowIgnore: reference?
|
||||
|
||||
|
||||
node.index = numberLiteralFromRaw(offsetInModule);
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
96
node_modules/@webassemblyjs/ast/esm/traverse.js
generated
vendored
Normal file
96
node_modules/@webassemblyjs/ast/esm/traverse.js
generated
vendored
Normal file
@@ -0,0 +1,96 @@
|
||||
import { createPath } from "./node-path";
|
||||
import { unionTypesMap, nodeAndUnionTypes } from "./nodes"; // recursively walks the AST starting at the given node. The callback is invoked for
|
||||
// and object that has a 'type' property.
|
||||
|
||||
function walk(context, callback) {
|
||||
var stop = false;
|
||||
|
||||
function innerWalk(context, callback) {
|
||||
if (stop) {
|
||||
return;
|
||||
}
|
||||
|
||||
var node = context.node;
|
||||
|
||||
if (node === undefined) {
|
||||
console.warn("traversing with an empty context");
|
||||
return;
|
||||
}
|
||||
|
||||
if (node._deleted === true) {
|
||||
return;
|
||||
}
|
||||
|
||||
var path = createPath(context);
|
||||
callback(node.type, path);
|
||||
|
||||
if (path.shouldStop) {
|
||||
stop = true;
|
||||
return;
|
||||
}
|
||||
|
||||
Object.keys(node).forEach(function (prop) {
|
||||
var value = node[prop];
|
||||
|
||||
if (value === null || value === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
var valueAsArray = Array.isArray(value) ? value : [value];
|
||||
valueAsArray.forEach(function (childNode) {
|
||||
if (typeof childNode.type === "string") {
|
||||
var childContext = {
|
||||
node: childNode,
|
||||
parentKey: prop,
|
||||
parentPath: path,
|
||||
shouldStop: false,
|
||||
inList: Array.isArray(value)
|
||||
};
|
||||
innerWalk(childContext, callback);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
innerWalk(context, callback);
|
||||
}
|
||||
|
||||
var noop = function noop() {};
|
||||
|
||||
export function traverse(node, visitors) {
|
||||
var before = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : noop;
|
||||
var after = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : noop;
|
||||
Object.keys(visitors).forEach(function (visitor) {
|
||||
if (!nodeAndUnionTypes.includes(visitor)) {
|
||||
throw new Error("Unexpected visitor ".concat(visitor));
|
||||
}
|
||||
});
|
||||
var context = {
|
||||
node: node,
|
||||
inList: false,
|
||||
shouldStop: false,
|
||||
parentPath: null,
|
||||
parentKey: null
|
||||
};
|
||||
walk(context, function (type, path) {
|
||||
if (typeof visitors[type] === "function") {
|
||||
before(type, path);
|
||||
visitors[type](path);
|
||||
after(type, path);
|
||||
}
|
||||
|
||||
var unionTypes = unionTypesMap[type];
|
||||
|
||||
if (!unionTypes) {
|
||||
throw new Error("Unexpected node type ".concat(type));
|
||||
}
|
||||
|
||||
unionTypes.forEach(function (unionType) {
|
||||
if (typeof visitors[unionType] === "function") {
|
||||
before(unionType, path);
|
||||
visitors[unionType](path);
|
||||
after(unionType, path);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
0
node_modules/@webassemblyjs/ast/esm/types/basic.js
generated
vendored
Normal file
0
node_modules/@webassemblyjs/ast/esm/types/basic.js
generated
vendored
Normal file
0
node_modules/@webassemblyjs/ast/esm/types/nodes.js
generated
vendored
Normal file
0
node_modules/@webassemblyjs/ast/esm/types/nodes.js
generated
vendored
Normal file
0
node_modules/@webassemblyjs/ast/esm/types/traverse.js
generated
vendored
Normal file
0
node_modules/@webassemblyjs/ast/esm/types/traverse.js
generated
vendored
Normal file
265
node_modules/@webassemblyjs/ast/esm/utils.js
generated
vendored
Normal file
265
node_modules/@webassemblyjs/ast/esm/utils.js
generated
vendored
Normal file
@@ -0,0 +1,265 @@
|
||||
function _sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
|
||||
|
||||
function _slicedToArray(arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return _sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }
|
||||
|
||||
function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
|
||||
|
||||
import { signatures } from "./signatures";
|
||||
import { traverse } from "./traverse";
|
||||
import constants from "@webassemblyjs/helper-wasm-bytecode";
|
||||
import { getSectionForNode } from "@webassemblyjs/helper-wasm-bytecode";
|
||||
export function isAnonymous(ident) {
|
||||
return ident.raw === "";
|
||||
}
|
||||
export function getSectionMetadata(ast, name) {
|
||||
var section;
|
||||
traverse(ast, {
|
||||
SectionMetadata: function (_SectionMetadata) {
|
||||
function SectionMetadata(_x) {
|
||||
return _SectionMetadata.apply(this, arguments);
|
||||
}
|
||||
|
||||
SectionMetadata.toString = function () {
|
||||
return _SectionMetadata.toString();
|
||||
};
|
||||
|
||||
return SectionMetadata;
|
||||
}(function (_ref) {
|
||||
var node = _ref.node;
|
||||
|
||||
if (node.section === name) {
|
||||
section = node;
|
||||
}
|
||||
})
|
||||
});
|
||||
return section;
|
||||
}
|
||||
export function getSectionMetadatas(ast, name) {
|
||||
var sections = [];
|
||||
traverse(ast, {
|
||||
SectionMetadata: function (_SectionMetadata2) {
|
||||
function SectionMetadata(_x2) {
|
||||
return _SectionMetadata2.apply(this, arguments);
|
||||
}
|
||||
|
||||
SectionMetadata.toString = function () {
|
||||
return _SectionMetadata2.toString();
|
||||
};
|
||||
|
||||
return SectionMetadata;
|
||||
}(function (_ref2) {
|
||||
var node = _ref2.node;
|
||||
|
||||
if (node.section === name) {
|
||||
sections.push(node);
|
||||
}
|
||||
})
|
||||
});
|
||||
return sections;
|
||||
}
|
||||
export function sortSectionMetadata(m) {
|
||||
if (m.metadata == null) {
|
||||
console.warn("sortSectionMetadata: no metadata to sort");
|
||||
return;
|
||||
} // $FlowIgnore
|
||||
|
||||
|
||||
m.metadata.sections.sort(function (a, b) {
|
||||
var aId = constants.sections[a.section];
|
||||
var bId = constants.sections[b.section];
|
||||
|
||||
if (typeof aId !== "number" || typeof bId !== "number") {
|
||||
throw new Error("Section id not found");
|
||||
}
|
||||
|
||||
return aId - bId;
|
||||
});
|
||||
}
|
||||
export function orderedInsertNode(m, n) {
|
||||
assertHasLoc(n);
|
||||
var didInsert = false;
|
||||
|
||||
if (n.type === "ModuleExport") {
|
||||
m.fields.push(n);
|
||||
return;
|
||||
}
|
||||
|
||||
m.fields = m.fields.reduce(function (acc, field) {
|
||||
var fieldEndCol = Infinity;
|
||||
|
||||
if (field.loc != null) {
|
||||
// $FlowIgnore
|
||||
fieldEndCol = field.loc.end.column;
|
||||
} // $FlowIgnore: assertHasLoc ensures that
|
||||
|
||||
|
||||
if (didInsert === false && n.loc.start.column < fieldEndCol) {
|
||||
didInsert = true;
|
||||
acc.push(n);
|
||||
}
|
||||
|
||||
acc.push(field);
|
||||
return acc;
|
||||
}, []); // Handles empty modules or n is the last element
|
||||
|
||||
if (didInsert === false) {
|
||||
m.fields.push(n);
|
||||
}
|
||||
}
|
||||
export function assertHasLoc(n) {
|
||||
if (n.loc == null || n.loc.start == null || n.loc.end == null) {
|
||||
throw new Error("Internal failure: node (".concat(JSON.stringify(n.type), ") has no location information"));
|
||||
}
|
||||
}
|
||||
export function getEndOfSection(s) {
|
||||
assertHasLoc(s.size);
|
||||
return s.startOffset + s.size.value + ( // $FlowIgnore
|
||||
s.size.loc.end.column - s.size.loc.start.column);
|
||||
}
|
||||
export function shiftLoc(node, delta) {
|
||||
// $FlowIgnore
|
||||
node.loc.start.column += delta; // $FlowIgnore
|
||||
|
||||
node.loc.end.column += delta;
|
||||
}
|
||||
export function shiftSection(ast, node, delta) {
|
||||
if (node.type !== "SectionMetadata") {
|
||||
throw new Error("Can not shift node " + JSON.stringify(node.type));
|
||||
}
|
||||
|
||||
node.startOffset += delta;
|
||||
|
||||
if (_typeof(node.size.loc) === "object") {
|
||||
shiftLoc(node.size, delta);
|
||||
} // Custom sections doesn't have vectorOfSize
|
||||
|
||||
|
||||
if (_typeof(node.vectorOfSize) === "object" && _typeof(node.vectorOfSize.loc) === "object") {
|
||||
shiftLoc(node.vectorOfSize, delta);
|
||||
}
|
||||
|
||||
var sectionName = node.section; // shift node locations within that section
|
||||
|
||||
traverse(ast, {
|
||||
Node: function Node(_ref3) {
|
||||
var node = _ref3.node;
|
||||
var section = getSectionForNode(node);
|
||||
|
||||
if (section === sectionName && _typeof(node.loc) === "object") {
|
||||
shiftLoc(node, delta);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
export function signatureForOpcode(object, name) {
|
||||
var opcodeName = name;
|
||||
|
||||
if (object !== undefined && object !== "") {
|
||||
opcodeName = object + "." + name;
|
||||
}
|
||||
|
||||
var sign = signatures[opcodeName];
|
||||
|
||||
if (sign == undefined) {
|
||||
// TODO: Uncomment this when br_table and others has been done
|
||||
//throw new Error("Invalid opcode: "+opcodeName);
|
||||
return [object, object];
|
||||
}
|
||||
|
||||
return sign[0];
|
||||
}
|
||||
export function getUniqueNameGenerator() {
|
||||
var inc = {};
|
||||
return function () {
|
||||
var prefix = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : "temp";
|
||||
|
||||
if (!(prefix in inc)) {
|
||||
inc[prefix] = 0;
|
||||
} else {
|
||||
inc[prefix] = inc[prefix] + 1;
|
||||
}
|
||||
|
||||
return prefix + "_" + inc[prefix];
|
||||
};
|
||||
}
|
||||
export function getStartByteOffset(n) {
|
||||
// $FlowIgnore
|
||||
if (typeof n.loc === "undefined" || typeof n.loc.start === "undefined") {
|
||||
throw new Error( // $FlowIgnore
|
||||
"Can not get byte offset without loc informations, node: " + String(n.id));
|
||||
}
|
||||
|
||||
return n.loc.start.column;
|
||||
}
|
||||
export function getEndByteOffset(n) {
|
||||
// $FlowIgnore
|
||||
if (typeof n.loc === "undefined" || typeof n.loc.end === "undefined") {
|
||||
throw new Error("Can not get byte offset without loc informations, node: " + n.type);
|
||||
}
|
||||
|
||||
return n.loc.end.column;
|
||||
}
|
||||
export function getFunctionBeginingByteOffset(n) {
|
||||
if (!(n.body.length > 0)) {
|
||||
throw new Error('n.body.length > 0' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
var _n$body = _slicedToArray(n.body, 1),
|
||||
firstInstruction = _n$body[0];
|
||||
|
||||
return getStartByteOffset(firstInstruction);
|
||||
}
|
||||
export function getEndBlockByteOffset(n) {
|
||||
// $FlowIgnore
|
||||
if (!(n.instr.length > 0 || n.body.length > 0)) {
|
||||
throw new Error('n.instr.length > 0 || n.body.length > 0' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
var lastInstruction;
|
||||
|
||||
if (n.instr) {
|
||||
// $FlowIgnore
|
||||
lastInstruction = n.instr[n.instr.length - 1];
|
||||
}
|
||||
|
||||
if (n.body) {
|
||||
// $FlowIgnore
|
||||
lastInstruction = n.body[n.body.length - 1];
|
||||
}
|
||||
|
||||
if (!(_typeof(lastInstruction) === "object")) {
|
||||
throw new Error('typeof lastInstruction === "object"' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
// $FlowIgnore
|
||||
return getStartByteOffset(lastInstruction);
|
||||
}
|
||||
export function getStartBlockByteOffset(n) {
|
||||
// $FlowIgnore
|
||||
if (!(n.instr.length > 0 || n.body.length > 0)) {
|
||||
throw new Error('n.instr.length > 0 || n.body.length > 0' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
var fistInstruction;
|
||||
|
||||
if (n.instr) {
|
||||
// $FlowIgnore
|
||||
var _n$instr = _slicedToArray(n.instr, 1);
|
||||
|
||||
fistInstruction = _n$instr[0];
|
||||
}
|
||||
|
||||
if (n.body) {
|
||||
// $FlowIgnore
|
||||
var _n$body2 = _slicedToArray(n.body, 1);
|
||||
|
||||
fistInstruction = _n$body2[0];
|
||||
}
|
||||
|
||||
if (!(_typeof(fistInstruction) === "object")) {
|
||||
throw new Error('typeof fistInstruction === "object"' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
// $FlowIgnore
|
||||
return getStartByteOffset(fistInstruction);
|
||||
}
|
||||
17
node_modules/@webassemblyjs/ast/lib/clone.js
generated
vendored
Normal file
17
node_modules/@webassemblyjs/ast/lib/clone.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.cloneNode = cloneNode;
|
||||
|
||||
function cloneNode(n) {
|
||||
// $FlowIgnore
|
||||
var newObj = {};
|
||||
|
||||
for (var k in n) {
|
||||
newObj[k] = n[k];
|
||||
}
|
||||
|
||||
return newObj;
|
||||
}
|
||||
663
node_modules/@webassemblyjs/ast/lib/definitions.js
generated
vendored
Normal file
663
node_modules/@webassemblyjs/ast/lib/definitions.js
generated
vendored
Normal file
@@ -0,0 +1,663 @@
|
||||
// Registry of AST node type descriptors, keyed by node type name.
var definitions = {};

// Register the metadata (spec links, doc string, union types, field
// descriptors) for a single AST node type.
function defineType(typeName, metadata) {
  definitions[typeName] = metadata;
}
|
||||
|
||||
defineType("Module", {
|
||||
spec: {
|
||||
wasm: "https://webassembly.github.io/spec/core/binary/modules.html#binary-module",
|
||||
wat: "https://webassembly.github.io/spec/core/text/modules.html#text-module"
|
||||
},
|
||||
doc: "A module consists of a sequence of sections (termed fields in the text format).",
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
id: {
|
||||
maybe: true,
|
||||
type: "string"
|
||||
},
|
||||
fields: {
|
||||
array: true,
|
||||
type: "Node"
|
||||
},
|
||||
metadata: {
|
||||
optional: true,
|
||||
type: "ModuleMetadata"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ModuleMetadata", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
sections: {
|
||||
array: true,
|
||||
type: "SectionMetadata"
|
||||
},
|
||||
functionNames: {
|
||||
optional: true,
|
||||
array: true,
|
||||
type: "FunctionNameMetadata"
|
||||
},
|
||||
localNames: {
|
||||
optional: true,
|
||||
array: true,
|
||||
type: "ModuleMetadata"
|
||||
},
|
||||
producers: {
|
||||
optional: true,
|
||||
array: true,
|
||||
type: "ProducersSectionMetadata"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ModuleNameMetadata", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("FunctionNameMetadata", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "string"
|
||||
},
|
||||
index: {
|
||||
type: "number"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("LocalNameMetadata", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "string"
|
||||
},
|
||||
localIndex: {
|
||||
type: "number"
|
||||
},
|
||||
functionIndex: {
|
||||
type: "number"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("BinaryModule", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
id: {
|
||||
maybe: true,
|
||||
type: "string"
|
||||
},
|
||||
blob: {
|
||||
array: true,
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("QuoteModule", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
id: {
|
||||
maybe: true,
|
||||
type: "string"
|
||||
},
|
||||
string: {
|
||||
array: true,
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("SectionMetadata", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
section: {
|
||||
type: "SectionName"
|
||||
},
|
||||
startOffset: {
|
||||
type: "number"
|
||||
},
|
||||
size: {
|
||||
type: "NumberLiteral"
|
||||
},
|
||||
vectorOfSize: {
|
||||
comment: "Size of the vector in the section (if any)",
|
||||
type: "NumberLiteral"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ProducersSectionMetadata", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
producers: {
|
||||
array: true,
|
||||
type: "ProducerMetadata"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ProducerMetadata", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
language: {
|
||||
type: "ProducerMetadataVersionedName",
|
||||
array: true
|
||||
},
|
||||
processedBy: {
|
||||
type: "ProducerMetadataVersionedName",
|
||||
array: true
|
||||
},
|
||||
sdk: {
|
||||
type: "ProducerMetadataVersionedName",
|
||||
array: true
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ProducerMetadataVersionedName", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
name: {
|
||||
type: "string"
|
||||
},
|
||||
version: {
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
/*
|
||||
Instructions
|
||||
*/
|
||||
|
||||
defineType("LoopInstruction", {
|
||||
unionType: ["Node", "Block", "Instruction"],
|
||||
fields: {
|
||||
id: {
|
||||
constant: true,
|
||||
type: "string",
|
||||
value: "loop"
|
||||
},
|
||||
label: {
|
||||
maybe: true,
|
||||
type: "Identifier"
|
||||
},
|
||||
resulttype: {
|
||||
maybe: true,
|
||||
type: "Valtype"
|
||||
},
|
||||
instr: {
|
||||
array: true,
|
||||
type: "Instruction"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Instr", {
|
||||
unionType: ["Node", "Expression", "Instruction"],
|
||||
fields: {
|
||||
id: {
|
||||
type: "string"
|
||||
},
|
||||
object: {
|
||||
optional: true,
|
||||
type: "Valtype"
|
||||
},
|
||||
args: {
|
||||
array: true,
|
||||
type: "Expression"
|
||||
},
|
||||
namedArgs: {
|
||||
optional: true,
|
||||
type: "Object"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("IfInstruction", {
|
||||
unionType: ["Node", "Instruction"],
|
||||
fields: {
|
||||
id: {
|
||||
constant: true,
|
||||
type: "string",
|
||||
value: "if"
|
||||
},
|
||||
testLabel: {
|
||||
comment: "only for WAST",
|
||||
type: "Identifier"
|
||||
},
|
||||
test: {
|
||||
array: true,
|
||||
type: "Instruction"
|
||||
},
|
||||
result: {
|
||||
maybe: true,
|
||||
type: "Valtype"
|
||||
},
|
||||
consequent: {
|
||||
array: true,
|
||||
type: "Instruction"
|
||||
},
|
||||
alternate: {
|
||||
array: true,
|
||||
type: "Instruction"
|
||||
}
|
||||
}
|
||||
});
|
||||
/*
|
||||
Concrete value types
|
||||
*/
|
||||
|
||||
defineType("StringLiteral", {
|
||||
unionType: ["Node", "Expression"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("NumberLiteral", {
|
||||
unionType: ["Node", "NumericLiteral", "Expression"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "number"
|
||||
},
|
||||
raw: {
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("LongNumberLiteral", {
|
||||
unionType: ["Node", "NumericLiteral", "Expression"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "LongNumber"
|
||||
},
|
||||
raw: {
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("FloatLiteral", {
|
||||
unionType: ["Node", "NumericLiteral", "Expression"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "number"
|
||||
},
|
||||
nan: {
|
||||
optional: true,
|
||||
type: "boolean"
|
||||
},
|
||||
inf: {
|
||||
optional: true,
|
||||
type: "boolean"
|
||||
},
|
||||
raw: {
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Elem", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
table: {
|
||||
type: "Index"
|
||||
},
|
||||
offset: {
|
||||
array: true,
|
||||
type: "Instruction"
|
||||
},
|
||||
funcs: {
|
||||
array: true,
|
||||
type: "Index"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("IndexInFuncSection", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
index: {
|
||||
type: "Index"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ValtypeLiteral", {
|
||||
unionType: ["Node", "Expression"],
|
||||
fields: {
|
||||
name: {
|
||||
type: "Valtype"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("TypeInstruction", {
|
||||
unionType: ["Node", "Instruction"],
|
||||
fields: {
|
||||
id: {
|
||||
maybe: true,
|
||||
type: "Index"
|
||||
},
|
||||
functype: {
|
||||
type: "Signature"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Start", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
index: {
|
||||
type: "Index"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("GlobalType", {
|
||||
unionType: ["Node", "ImportDescr"],
|
||||
fields: {
|
||||
valtype: {
|
||||
type: "Valtype"
|
||||
},
|
||||
mutability: {
|
||||
type: "Mutability"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("LeadingComment", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("BlockComment", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Data", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
memoryIndex: {
|
||||
type: "Memidx"
|
||||
},
|
||||
offset: {
|
||||
type: "Instruction"
|
||||
},
|
||||
init: {
|
||||
type: "ByteArray"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Global", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
globalType: {
|
||||
type: "GlobalType"
|
||||
},
|
||||
init: {
|
||||
array: true,
|
||||
type: "Instruction"
|
||||
},
|
||||
name: {
|
||||
maybe: true,
|
||||
type: "Identifier"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Table", {
|
||||
unionType: ["Node", "ImportDescr"],
|
||||
fields: {
|
||||
elementType: {
|
||||
type: "TableElementType"
|
||||
},
|
||||
limits: {
|
||||
assertNodeType: true,
|
||||
type: "Limit"
|
||||
},
|
||||
name: {
|
||||
maybe: true,
|
||||
type: "Identifier"
|
||||
},
|
||||
elements: {
|
||||
array: true,
|
||||
optional: true,
|
||||
type: "Index"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Memory", {
|
||||
unionType: ["Node", "ImportDescr"],
|
||||
fields: {
|
||||
limits: {
|
||||
type: "Limit"
|
||||
},
|
||||
id: {
|
||||
maybe: true,
|
||||
type: "Index"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("FuncImportDescr", {
|
||||
unionType: ["Node", "ImportDescr"],
|
||||
fields: {
|
||||
id: {
|
||||
type: "Identifier"
|
||||
},
|
||||
signature: {
|
||||
type: "Signature"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ModuleImport", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
module: {
|
||||
type: "string"
|
||||
},
|
||||
name: {
|
||||
type: "string"
|
||||
},
|
||||
descr: {
|
||||
type: "ImportDescr"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ModuleExportDescr", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
exportType: {
|
||||
type: "ExportDescrType"
|
||||
},
|
||||
id: {
|
||||
type: "Index"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ModuleExport", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
name: {
|
||||
type: "string"
|
||||
},
|
||||
descr: {
|
||||
type: "ModuleExportDescr"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Limit", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
min: {
|
||||
type: "number"
|
||||
},
|
||||
max: {
|
||||
optional: true,
|
||||
type: "number"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Signature", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
params: {
|
||||
array: true,
|
||||
type: "FuncParam"
|
||||
},
|
||||
results: {
|
||||
array: true,
|
||||
type: "Valtype"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Program", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
body: {
|
||||
array: true,
|
||||
type: "Node"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Identifier", {
|
||||
unionType: ["Node", "Expression"],
|
||||
fields: {
|
||||
value: {
|
||||
type: "string"
|
||||
},
|
||||
raw: {
|
||||
optional: true,
|
||||
type: "string"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("BlockInstruction", {
|
||||
unionType: ["Node", "Block", "Instruction"],
|
||||
fields: {
|
||||
id: {
|
||||
constant: true,
|
||||
type: "string",
|
||||
value: "block"
|
||||
},
|
||||
label: {
|
||||
maybe: true,
|
||||
type: "Identifier"
|
||||
},
|
||||
instr: {
|
||||
array: true,
|
||||
type: "Instruction"
|
||||
},
|
||||
result: {
|
||||
maybe: true,
|
||||
type: "Valtype"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("CallInstruction", {
|
||||
unionType: ["Node", "Instruction"],
|
||||
fields: {
|
||||
id: {
|
||||
constant: true,
|
||||
type: "string",
|
||||
value: "call"
|
||||
},
|
||||
index: {
|
||||
type: "Index"
|
||||
},
|
||||
instrArgs: {
|
||||
array: true,
|
||||
optional: true,
|
||||
type: "Expression"
|
||||
},
|
||||
numeric: {
|
||||
type: "Index",
|
||||
optional: true
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("CallIndirectInstruction", {
|
||||
unionType: ["Node", "Instruction"],
|
||||
fields: {
|
||||
id: {
|
||||
constant: true,
|
||||
type: "string",
|
||||
value: "call_indirect"
|
||||
},
|
||||
signature: {
|
||||
type: "SignatureOrTypeRef"
|
||||
},
|
||||
intrs: {
|
||||
array: true,
|
||||
optional: true,
|
||||
type: "Expression"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("ByteArray", {
|
||||
unionType: ["Node"],
|
||||
fields: {
|
||||
values: {
|
||||
array: true,
|
||||
type: "Byte"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("Func", {
|
||||
unionType: ["Node", "Block"],
|
||||
fields: {
|
||||
name: {
|
||||
maybe: true,
|
||||
type: "Index"
|
||||
},
|
||||
signature: {
|
||||
type: "SignatureOrTypeRef"
|
||||
},
|
||||
body: {
|
||||
array: true,
|
||||
type: "Instruction"
|
||||
},
|
||||
isExternal: {
|
||||
comment: "means that it has been imported from the outside js",
|
||||
optional: true,
|
||||
type: "boolean"
|
||||
},
|
||||
metadata: {
|
||||
optional: true,
|
||||
type: "FuncMetadata"
|
||||
}
|
||||
}
|
||||
});
|
||||
/**
|
||||
* Intrinsics
|
||||
*/
|
||||
|
||||
defineType("InternalBrUnless", {
|
||||
unionType: ["Node", "Intrinsic"],
|
||||
fields: {
|
||||
target: {
|
||||
type: "number"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("InternalGoto", {
|
||||
unionType: ["Node", "Intrinsic"],
|
||||
fields: {
|
||||
target: {
|
||||
type: "number"
|
||||
}
|
||||
}
|
||||
});
|
||||
defineType("InternalCallExtern", {
|
||||
unionType: ["Node", "Intrinsic"],
|
||||
fields: {
|
||||
target: {
|
||||
type: "number"
|
||||
}
|
||||
}
|
||||
}); // function bodies are terminated by an `end` instruction but are missing a
|
||||
// return instruction
|
||||
//
|
||||
// Since we can't inject a new instruction we are injecting a new instruction.
|
||||
|
||||
defineType("InternalEndAndReturn", {
|
||||
unionType: ["Node", "Intrinsic"],
|
||||
fields: {}
|
||||
});
|
||||
module.exports = definitions;
|
||||
118
node_modules/@webassemblyjs/ast/lib/index.js
generated
vendored
Normal file
118
node_modules/@webassemblyjs/ast/lib/index.js
generated
vendored
Normal file
@@ -0,0 +1,118 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
var _exportNames = {
|
||||
numberLiteralFromRaw: true,
|
||||
withLoc: true,
|
||||
withRaw: true,
|
||||
funcParam: true,
|
||||
indexLiteral: true,
|
||||
memIndexLiteral: true,
|
||||
instruction: true,
|
||||
objectInstruction: true,
|
||||
traverse: true,
|
||||
signatures: true,
|
||||
cloneNode: true
|
||||
};
|
||||
Object.defineProperty(exports, "numberLiteralFromRaw", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _nodeHelpers.numberLiteralFromRaw;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "withLoc", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _nodeHelpers.withLoc;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "withRaw", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _nodeHelpers.withRaw;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "funcParam", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _nodeHelpers.funcParam;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "indexLiteral", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _nodeHelpers.indexLiteral;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "memIndexLiteral", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _nodeHelpers.memIndexLiteral;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "instruction", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _nodeHelpers.instruction;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "objectInstruction", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _nodeHelpers.objectInstruction;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "traverse", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _traverse.traverse;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "signatures", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _signatures.signatures;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "cloneNode", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _clone.cloneNode;
|
||||
}
|
||||
});
|
||||
|
||||
var _nodes = require("./nodes");
|
||||
|
||||
Object.keys(_nodes).forEach(function (key) {
|
||||
if (key === "default" || key === "__esModule") return;
|
||||
if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return;
|
||||
Object.defineProperty(exports, key, {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _nodes[key];
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
var _nodeHelpers = require("./node-helpers.js");
|
||||
|
||||
var _traverse = require("./traverse");
|
||||
|
||||
var _signatures = require("./signatures");
|
||||
|
||||
var _utils = require("./utils");
|
||||
|
||||
Object.keys(_utils).forEach(function (key) {
|
||||
if (key === "default" || key === "__esModule") return;
|
||||
if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return;
|
||||
Object.defineProperty(exports, key, {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _utils[key];
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
var _clone = require("./clone");
|
||||
107
node_modules/@webassemblyjs/ast/lib/node-helpers.js
generated
vendored
Normal file
107
node_modules/@webassemblyjs/ast/lib/node-helpers.js
generated
vendored
Normal file
@@ -0,0 +1,107 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.numberLiteralFromRaw = numberLiteralFromRaw;
|
||||
exports.instruction = instruction;
|
||||
exports.objectInstruction = objectInstruction;
|
||||
exports.withLoc = withLoc;
|
||||
exports.withRaw = withRaw;
|
||||
exports.funcParam = funcParam;
|
||||
exports.indexLiteral = indexLiteral;
|
||||
exports.memIndexLiteral = memIndexLiteral;
|
||||
|
||||
var _wastParser = require("@webassemblyjs/wast-parser");
|
||||
|
||||
var _nodes = require("./nodes");
|
||||
|
||||
/**
 * Build a numeric-literal AST node from a raw value.
 *
 * Numeric separators (`_`) are stripped from string input first. A plain
 * `number` always yields a NumberLiteral; string input is parsed per
 * `instructionType`: "i32"/"u32" -> NumberLiteral, "i64" ->
 * LongNumberLiteral, "f32" and anything else (f64) -> FloatLiteral.
 *
 * @param {number|string} rawValue - raw numeric value
 * @param {string} [instructionType="i32"] - wasm value type of the literal
 */
function numberLiteralFromRaw(rawValue) {
  var instructionType = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : "i32";
  var original = rawValue;

  // Remove numeric separators `_` before parsing.
  if (typeof rawValue === "string") {
    rawValue = rawValue.replace(/_/g, "");
  }

  if (typeof rawValue === "number") {
    return (0, _nodes.numberLiteral)(rawValue, String(original));
  }

  switch (instructionType) {
    case "i32":
      return (0, _nodes.numberLiteral)((0, _wastParser.parse32I)(rawValue), String(original));

    case "u32":
      return (0, _nodes.numberLiteral)((0, _wastParser.parseU32)(rawValue), String(original));

    case "i64":
      return (0, _nodes.longNumberLiteral)((0, _wastParser.parse64I)(rawValue), String(original));

    case "f32":
      return (0, _nodes.floatLiteral)((0, _wastParser.parse32F)(rawValue), (0, _wastParser.isNanLiteral)(rawValue), (0, _wastParser.isInfLiteral)(rawValue), String(original));

    // f64 and any unrecognized type fall back to the 64-bit float parser.
    default:
      return (0, _nodes.floatLiteral)((0, _wastParser.parse64F)(rawValue), (0, _wastParser.isNanLiteral)(rawValue), (0, _wastParser.isInfLiteral)(rawValue), String(original));
  }
}
|
||||
|
||||
/**
 * Shorthand for building a plain `Instr` node with no object (type) prefix.
 * @param {string} id - instruction name
 * @param {Array} [args=[]] - argument expressions
 * @param {Object} [namedArgs={}] - named arguments
 */
function instruction(id) {
  var args = arguments[1] === undefined ? [] : arguments[1];
  var namedArgs = arguments[2] === undefined ? {} : arguments[2];
  return (0, _nodes.instr)(id, undefined, args, namedArgs);
}
|
||||
|
||||
/**
 * Shorthand for building an `Instr` node with an object (type) prefix,
 * e.g. the `i32` in `i32.add`.
 * @param {string} id - instruction name
 * @param {string} object - value-type prefix
 * @param {Array} [args=[]] - argument expressions
 * @param {Object} [namedArgs={}] - named arguments
 */
function objectInstruction(id, object) {
  var args = arguments[2] === undefined ? [] : arguments[2];
  var namedArgs = arguments[3] === undefined ? {} : arguments[3];
  return (0, _nodes.instr)(id, object, args, namedArgs);
}
|
||||
/**
|
||||
* Decorators
|
||||
*/
|
||||
|
||||
|
||||
/**
 * Attach a source location to a node, in place.
 * NOTE: the argument order is (node, end, start) — `end` comes first.
 * @returns the same node, for chaining
 */
function withLoc(n, end, start) {
  n.loc = {
    start: start,
    end: end
  };
  return n;
}
|
||||
|
||||
/**
 * Attach the raw source text to a node, in place.
 * @returns the same node, for chaining
 */
function withRaw(n, raw) {
  n.raw = raw;
  return n;
}
|
||||
|
||||
/**
 * Build a function-parameter record.
 * @param {string} valtype - wasm value type of the parameter
 * @param {Object} id - optional identifier node
 * @returns {{id: Object, valtype: string}}
 */
function funcParam(valtype, id) {
  return {
    id: id,
    valtype: valtype
  };
}
|
||||
|
||||
/**
 * Build a u32 number literal usable as an index.
 * @param {number|string} value - raw index value
 */
function indexLiteral(value) {
  // $FlowIgnore
  return numberLiteralFromRaw(value, "u32");
}
|
||||
|
||||
/**
 * Build a u32 number literal usable as a memory index.
 * @param {number|string} value - raw memory-index value
 */
function memIndexLiteral(value) {
  // $FlowIgnore
  return numberLiteralFromRaw(value, "u32");
}
|
||||
144
node_modules/@webassemblyjs/ast/lib/node-path.js
generated
vendored
Normal file
144
node_modules/@webassemblyjs/ast/lib/node-path.js
generated
vendored
Normal file
@@ -0,0 +1,144 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.createPath = createPath;
|
||||
|
||||
// Babel helper: Object.assign ponyfill. Merges the own enumerable
// properties of each source object into `target` (later sources win)
// and returns `target`.
function _extends() {
  _extends = Object.assign || function (target) {
    for (var i = 1; i < arguments.length; i++) {
      var source = arguments[i];

      for (var key in source) {
        if (Object.prototype.hasOwnProperty.call(source, key)) {
          target[key] = source[key];
        }
      }
    }

    return target;
  };

  return _extends.apply(this, arguments);
}
|
||||
|
||||
/**
 * Walk up the parent-path chain and return the node of the first
 * ancestor path for which `cb` returns `false`. Returns `null` when the
 * walk reaches the root without `cb` stopping it.
 *
 * @throws {Error} when called on the root path (no parent).
 */
function findParent(_ref, cb) {
  var parentPath = _ref.parentPath;

  if (parentPath == null) {
    throw new Error("node is root");
  }

  var current = parentPath;

  while (cb(current) !== false) {
    // Hit the root node without a match: stop.
    // $FlowIgnore
    if (current.parentPath == null) {
      return null;
    }

    // $FlowIgnore
    current = current.parentPath;
  }

  return current.node;
}
|
||||
|
||||
// Insert `newNode` into the parent list immediately before `context.node`.
function insertBefore(context, newNode) {
  return insert(context, newNode);
}
|
||||
|
||||
// Insert `newNode` into the parent list immediately after `context.node`.
function insertAfter(context, newNode) {
  return insert(context, newNode, 1);
}
|
||||
|
||||
/**
 * Splice `newNode` into the list containing `node`, at the position of
 * `node` plus `indexOffset` (0 = before it, 1 = after it).
 *
 * @throws {Error} when the node is not part of a list, or is the root.
 */
function insert(_ref2, newNode) {
  var node = _ref2.node,
      inList = _ref2.inList,
      parentPath = _ref2.parentPath,
      parentKey = _ref2.parentKey;
  var indexOffset = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;

  if (!inList) {
    throw new Error('inList' + " error: " + "insert can only be used for nodes that are within lists");
  }

  if (parentPath == null) {
    throw new Error('parentPath != null' + " error: " + "Can not remove root node");
  }

  // Locate the node among its siblings and splice the new node in.
  // $FlowIgnore
  var siblings = parentPath.node[parentKey];
  var position = siblings.findIndex(function (sibling) {
    return sibling === node;
  });
  siblings.splice(position + indexOffset, 0, newNode);
}
|
||||
|
||||
/**
 * Detach `context.node` from its parent: filtered out of an array
 * property, or deleted when it sits in a scalar property. The removed
 * node is marked with `_deleted`.
 *
 * @throws {Error} when called on the root path.
 */
function remove(_ref3) {
  var node = _ref3.node,
      parentKey = _ref3.parentKey,
      parentPath = _ref3.parentPath;

  if (parentPath == null) {
    throw new Error('parentPath != null' + " error: " + "Can not remove root node");
  }

  // $FlowIgnore
  var parentNode = parentPath.node; // $FlowIgnore

  var container = parentNode[parentKey];

  if (Array.isArray(container)) {
    // Replace the list with a copy that excludes the removed node.
    // $FlowIgnore
    parentNode[parentKey] = container.filter(function (sibling) {
      return sibling !== node;
    });
  } else {
    // Scalar slot: drop the property entirely.
    // $FlowIgnore
    delete parentNode[parentKey];
  }

  node._deleted = true;
}
|
||||
|
||||
// Signal the traversal driver to stop visiting further nodes.
function stop(context) {
  context.shouldStop = true;
}
|
||||
|
||||
/**
 * Swap `context.node` for `newNode` in the parent — in its array slot,
 * or in its scalar property — then mark the old node `_deleted` and
 * repoint the context at the replacement.
 */
function replaceWith(context, newNode) {
  // $FlowIgnore
  var parentNode = context.parentPath.node; // $FlowIgnore

  var container = parentNode[context.parentKey];

  if (Array.isArray(container)) {
    var position = container.findIndex(function (sibling) {
      return sibling === context.node;
    });
    container.splice(position, 1, newNode);
  } else {
    // Scalar slot: overwrite the property.
    // $FlowIgnore
    parentNode[context.parentKey] = newNode;
  }

  context.node._deleted = true;
  context.node = newNode;
} // bind the context to the first argument of node operations
|
||||
|
||||
|
||||
/**
 * Pre-bind `context` as the first argument of every operation in the
 * given map, returning a new map of bound functions.
 */
function bindNodeOperations(operations, context) {
  var bound = {};
  Object.keys(operations).forEach(function (name) {
    bound[name] = operations[name].bind(null, context);
  });
  return bound;
}
|
||||
|
||||
// Build the full set of path operations (findParent, replaceWith,
// remove, insertBefore, insertAfter, stop) with `context` pre-bound as
// their first argument.
function createPathOperations(context) {
  // $FlowIgnore
  return bindNodeOperations({
    findParent: findParent,
    replaceWith: replaceWith,
    remove: remove,
    insertBefore: insertBefore,
    insertAfter: insertAfter,
    stop: stop
  }, context);
}
|
||||
|
||||
// Create a traversal path: a shallow copy of `context` augmented with
// the bound node operations (replaceWith, remove, insertBefore, ...).
function createPath(context) {
  var path = _extends({}, context); // $FlowIgnore

  Object.assign(path, createPathOperations(path)); // $FlowIgnore

  return path;
}
|
||||
1134
node_modules/@webassemblyjs/ast/lib/nodes.js
generated
vendored
Normal file
1134
node_modules/@webassemblyjs/ast/lib/nodes.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
207
node_modules/@webassemblyjs/ast/lib/signatures.js
generated
vendored
Normal file
207
node_modules/@webassemblyjs/ast/lib/signatures.js
generated
vendored
Normal file
@@ -0,0 +1,207 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.signatures = void 0;
|
||||
|
||||
// Pair an instruction's input types with its output types.
function sign(input, output) {
  return [input, output];
}
|
||||
|
||||
var u32 = "u32";
|
||||
var i32 = "i32";
|
||||
var i64 = "i64";
|
||||
var f32 = "f32";
|
||||
var f64 = "f64";
|
||||
|
||||
// Wrap a type in a single-element array tagged with `.vector = true`,
// marking a variable-length operand (e.g. br_table's label vector).
var vector = function vector(t) {
  var vecType = [t]; // $FlowIgnore

  vecType.vector = true;
  return vecType;
};
|
||||
|
||||
var controlInstructions = {
|
||||
unreachable: sign([], []),
|
||||
nop: sign([], []),
|
||||
// block ?
|
||||
// loop ?
|
||||
// if ?
|
||||
// if else ?
|
||||
br: sign([u32], []),
|
||||
br_if: sign([u32], []),
|
||||
br_table: sign(vector(u32), []),
|
||||
return: sign([], []),
|
||||
call: sign([u32], []),
|
||||
call_indirect: sign([u32], [])
|
||||
};
|
||||
var parametricInstructions = {
|
||||
drop: sign([], []),
|
||||
select: sign([], [])
|
||||
};
|
||||
var variableInstructions = {
|
||||
get_local: sign([u32], []),
|
||||
set_local: sign([u32], []),
|
||||
tee_local: sign([u32], []),
|
||||
get_global: sign([u32], []),
|
||||
set_global: sign([u32], [])
|
||||
};
|
||||
var memoryInstructions = {
|
||||
"i32.load": sign([u32, u32], [i32]),
|
||||
"i64.load": sign([u32, u32], []),
|
||||
"f32.load": sign([u32, u32], []),
|
||||
"f64.load": sign([u32, u32], []),
|
||||
"i32.load8_s": sign([u32, u32], [i32]),
|
||||
"i32.load8_u": sign([u32, u32], [i32]),
|
||||
"i32.load16_s": sign([u32, u32], [i32]),
|
||||
"i32.load16_u": sign([u32, u32], [i32]),
|
||||
"i64.load8_s": sign([u32, u32], [i64]),
|
||||
"i64.load8_u": sign([u32, u32], [i64]),
|
||||
"i64.load16_s": sign([u32, u32], [i64]),
|
||||
"i64.load16_u": sign([u32, u32], [i64]),
|
||||
"i64.load32_s": sign([u32, u32], [i64]),
|
||||
"i64.load32_u": sign([u32, u32], [i64]),
|
||||
"i32.store": sign([u32, u32], []),
|
||||
"i64.store": sign([u32, u32], []),
|
||||
"f32.store": sign([u32, u32], []),
|
||||
"f64.store": sign([u32, u32], []),
|
||||
"i32.store8": sign([u32, u32], []),
|
||||
"i32.store16": sign([u32, u32], []),
|
||||
"i64.store8": sign([u32, u32], []),
|
||||
"i64.store16": sign([u32, u32], []),
|
||||
"i64.store32": sign([u32, u32], []),
|
||||
current_memory: sign([], []),
|
||||
grow_memory: sign([], [])
|
||||
};
|
||||
var numericInstructions = {
|
||||
"i32.const": sign([i32], [i32]),
|
||||
"i64.const": sign([i64], [i64]),
|
||||
"f32.const": sign([f32], [f32]),
|
||||
"f64.const": sign([f64], [f64]),
|
||||
"i32.eqz": sign([i32], [i32]),
|
||||
"i32.eq": sign([i32, i32], [i32]),
|
||||
"i32.ne": sign([i32, i32], [i32]),
|
||||
"i32.lt_s": sign([i32, i32], [i32]),
|
||||
"i32.lt_u": sign([i32, i32], [i32]),
|
||||
"i32.gt_s": sign([i32, i32], [i32]),
|
||||
"i32.gt_u": sign([i32, i32], [i32]),
|
||||
"i32.le_s": sign([i32, i32], [i32]),
|
||||
"i32.le_u": sign([i32, i32], [i32]),
|
||||
"i32.ge_s": sign([i32, i32], [i32]),
|
||||
"i32.ge_u": sign([i32, i32], [i32]),
|
||||
"i64.eqz": sign([i64], [i64]),
|
||||
"i64.eq": sign([i64, i64], [i32]),
|
||||
"i64.ne": sign([i64, i64], [i32]),
|
||||
"i64.lt_s": sign([i64, i64], [i32]),
|
||||
"i64.lt_u": sign([i64, i64], [i32]),
|
||||
"i64.gt_s": sign([i64, i64], [i32]),
|
||||
"i64.gt_u": sign([i64, i64], [i32]),
|
||||
"i64.le_s": sign([i64, i64], [i32]),
|
||||
"i64.le_u": sign([i64, i64], [i32]),
|
||||
"i64.ge_s": sign([i64, i64], [i32]),
|
||||
"i64.ge_u": sign([i64, i64], [i32]),
|
||||
"f32.eq": sign([f32, f32], [i32]),
|
||||
"f32.ne": sign([f32, f32], [i32]),
|
||||
"f32.lt": sign([f32, f32], [i32]),
|
||||
"f32.gt": sign([f32, f32], [i32]),
|
||||
"f32.le": sign([f32, f32], [i32]),
|
||||
"f32.ge": sign([f32, f32], [i32]),
|
||||
"f64.eq": sign([f64, f64], [i32]),
|
||||
"f64.ne": sign([f64, f64], [i32]),
|
||||
"f64.lt": sign([f64, f64], [i32]),
|
||||
"f64.gt": sign([f64, f64], [i32]),
|
||||
"f64.le": sign([f64, f64], [i32]),
|
||||
"f64.ge": sign([f64, f64], [i32]),
|
||||
"i32.clz": sign([i32], [i32]),
|
||||
"i32.ctz": sign([i32], [i32]),
|
||||
"i32.popcnt": sign([i32], [i32]),
|
||||
"i32.add": sign([i32, i32], [i32]),
|
||||
"i32.sub": sign([i32, i32], [i32]),
|
||||
"i32.mul": sign([i32, i32], [i32]),
|
||||
"i32.div_s": sign([i32, i32], [i32]),
|
||||
"i32.div_u": sign([i32, i32], [i32]),
|
||||
"i32.rem_s": sign([i32, i32], [i32]),
|
||||
"i32.rem_u": sign([i32, i32], [i32]),
|
||||
"i32.and": sign([i32, i32], [i32]),
|
||||
"i32.or": sign([i32, i32], [i32]),
|
||||
"i32.xor": sign([i32, i32], [i32]),
|
||||
"i32.shl": sign([i32, i32], [i32]),
|
||||
"i32.shr_s": sign([i32, i32], [i32]),
|
||||
"i32.shr_u": sign([i32, i32], [i32]),
|
||||
"i32.rotl": sign([i32, i32], [i32]),
|
||||
"i32.rotr": sign([i32, i32], [i32]),
|
||||
"i64.clz": sign([i64], [i64]),
|
||||
"i64.ctz": sign([i64], [i64]),
|
||||
"i64.popcnt": sign([i64], [i64]),
|
||||
"i64.add": sign([i64, i64], [i64]),
|
||||
"i64.sub": sign([i64, i64], [i64]),
|
||||
"i64.mul": sign([i64, i64], [i64]),
|
||||
"i64.div_s": sign([i64, i64], [i64]),
|
||||
"i64.div_u": sign([i64, i64], [i64]),
|
||||
"i64.rem_s": sign([i64, i64], [i64]),
|
||||
"i64.rem_u": sign([i64, i64], [i64]),
|
||||
"i64.and": sign([i64, i64], [i64]),
|
||||
"i64.or": sign([i64, i64], [i64]),
|
||||
"i64.xor": sign([i64, i64], [i64]),
|
||||
"i64.shl": sign([i64, i64], [i64]),
|
||||
"i64.shr_s": sign([i64, i64], [i64]),
|
||||
"i64.shr_u": sign([i64, i64], [i64]),
|
||||
"i64.rotl": sign([i64, i64], [i64]),
|
||||
"i64.rotr": sign([i64, i64], [i64]),
|
||||
"f32.abs": sign([f32], [f32]),
|
||||
"f32.neg": sign([f32], [f32]),
|
||||
"f32.ceil": sign([f32], [f32]),
|
||||
"f32.floor": sign([f32], [f32]),
|
||||
"f32.trunc": sign([f32], [f32]),
|
||||
"f32.nearest": sign([f32], [f32]),
|
||||
"f32.sqrt": sign([f32], [f32]),
|
||||
"f32.add": sign([f32, f32], [f32]),
|
||||
"f32.sub": sign([f32, f32], [f32]),
|
||||
"f32.mul": sign([f32, f32], [f32]),
|
||||
"f32.div": sign([f32, f32], [f32]),
|
||||
"f32.min": sign([f32, f32], [f32]),
|
||||
"f32.max": sign([f32, f32], [f32]),
|
||||
"f32.copysign": sign([f32, f32], [f32]),
|
||||
"f64.abs": sign([f64], [f64]),
|
||||
"f64.neg": sign([f64], [f64]),
|
||||
"f64.ceil": sign([f64], [f64]),
|
||||
"f64.floor": sign([f64], [f64]),
|
||||
"f64.trunc": sign([f64], [f64]),
|
||||
"f64.nearest": sign([f64], [f64]),
|
||||
"f64.sqrt": sign([f64], [f64]),
|
||||
"f64.add": sign([f64, f64], [f64]),
|
||||
"f64.sub": sign([f64, f64], [f64]),
|
||||
"f64.mul": sign([f64, f64], [f64]),
|
||||
"f64.div": sign([f64, f64], [f64]),
|
||||
"f64.min": sign([f64, f64], [f64]),
|
||||
"f64.max": sign([f64, f64], [f64]),
|
||||
"f64.copysign": sign([f64, f64], [f64]),
|
||||
"i32.wrap/i64": sign([i64], [i32]),
|
||||
"i32.trunc_s/f32": sign([f32], [i32]),
|
||||
"i32.trunc_u/f32": sign([f32], [i32]),
|
||||
"i32.trunc_s/f64": sign([f32], [i32]),
|
||||
"i32.trunc_u/f64": sign([f64], [i32]),
|
||||
"i64.extend_s/i32": sign([i32], [i64]),
|
||||
"i64.extend_u/i32": sign([i32], [i64]),
|
||||
"i64.trunc_s/f32": sign([f32], [i64]),
|
||||
"i64.trunc_u/f32": sign([f32], [i64]),
|
||||
"i64.trunc_s/f64": sign([f64], [i64]),
|
||||
"i64.trunc_u/f64": sign([f64], [i64]),
|
||||
"f32.convert_s/i32": sign([i32], [f32]),
|
||||
"f32.convert_u/i32": sign([i32], [f32]),
|
||||
"f32.convert_s/i64": sign([i64], [f32]),
|
||||
"f32.convert_u/i64": sign([i64], [f32]),
|
||||
"f32.demote/f64": sign([f64], [f32]),
|
||||
"f64.convert_s/i32": sign([i32], [f64]),
|
||||
"f64.convert_u/i32": sign([i32], [f64]),
|
||||
"f64.convert_s/i64": sign([i64], [f64]),
|
||||
"f64.convert_u/i64": sign([i64], [f64]),
|
||||
"f64.promote/f32": sign([f32], [f64]),
|
||||
"i32.reinterpret/f32": sign([f32], [i32]),
|
||||
"i64.reinterpret/f64": sign([f64], [i64]),
|
||||
"f32.reinterpret/i32": sign([i32], [f32]),
|
||||
"f64.reinterpret/i64": sign([i64], [f64])
|
||||
};
|
||||
var signatures = Object.assign({}, controlInstructions, parametricInstructions, variableInstructions, memoryInstructions, numericInstructions);
|
||||
exports.signatures = signatures;
|
||||
83
node_modules/@webassemblyjs/ast/lib/transform/denormalize-type-references/index.js
generated
vendored
Normal file
83
node_modules/@webassemblyjs/ast/lib/transform/denormalize-type-references/index.js
generated
vendored
Normal file
@@ -0,0 +1,83 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.transform = transform;
|
||||
|
||||
var t = require("../../index"); // func and call_indirect instructions can either define a signature inline, or
|
||||
// reference a signature, e.g.
|
||||
//
|
||||
// ;; inline signature
|
||||
// (func (result i64)
|
||||
// (i64.const 2)
|
||||
// )
|
||||
// ;; signature reference
|
||||
// (type (func (result i64)))
|
||||
// (func (type 0)
|
||||
// (i64.const 2))
|
||||
// )
|
||||
//
|
||||
// this AST transform denormalises the type references, making all signatures within the module
|
||||
// inline.
|
||||
|
||||
|
||||
function transform(ast) {
|
||||
var typeInstructions = [];
|
||||
t.traverse(ast, {
|
||||
TypeInstruction: function TypeInstruction(_ref) {
|
||||
var node = _ref.node;
|
||||
typeInstructions.push(node);
|
||||
}
|
||||
});
|
||||
|
||||
if (!typeInstructions.length) {
|
||||
return;
|
||||
}
|
||||
|
||||
function denormalizeSignature(signature) {
|
||||
// signature referenced by identifier
|
||||
if (signature.type === "Identifier") {
|
||||
var identifier = signature;
|
||||
var typeInstruction = typeInstructions.find(function (t) {
|
||||
return t.id.type === identifier.type && t.id.value === identifier.value;
|
||||
});
|
||||
|
||||
if (!typeInstruction) {
|
||||
throw new Error("A type instruction reference was not found ".concat(JSON.stringify(signature)));
|
||||
}
|
||||
|
||||
return typeInstruction.functype;
|
||||
} // signature referenced by index
|
||||
|
||||
|
||||
if (signature.type === "NumberLiteral") {
|
||||
var signatureRef = signature;
|
||||
var _typeInstruction = typeInstructions[signatureRef.value];
|
||||
return _typeInstruction.functype;
|
||||
}
|
||||
|
||||
return signature;
|
||||
}
|
||||
|
||||
t.traverse(ast, {
|
||||
Func: function (_Func) {
|
||||
function Func(_x) {
|
||||
return _Func.apply(this, arguments);
|
||||
}
|
||||
|
||||
Func.toString = function () {
|
||||
return _Func.toString();
|
||||
};
|
||||
|
||||
return Func;
|
||||
}(function (_ref2) {
|
||||
var node = _ref2.node;
|
||||
node.signature = denormalizeSignature(node.signature);
|
||||
}),
|
||||
CallIndirectInstruction: function CallIndirectInstruction(_ref3) {
|
||||
var node = _ref3.node;
|
||||
node.signature = denormalizeSignature(node.signature);
|
||||
}
|
||||
});
|
||||
}
|
||||
225
node_modules/@webassemblyjs/ast/lib/transform/wast-identifier-to-index/index.js
generated
vendored
Normal file
225
node_modules/@webassemblyjs/ast/lib/transform/wast-identifier-to-index/index.js
generated
vendored
Normal file
@@ -0,0 +1,225 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.transform = transform;
|
||||
|
||||
var _index = require("../../index");
|
||||
|
||||
var _helperModuleContext = require("@webassemblyjs/helper-module-context");
|
||||
|
||||
function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
|
||||
|
||||
function _sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
|
||||
|
||||
function _slicedToArray(arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return _sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }
|
||||
|
||||
// FIXME(sven): do the same with all block instructions, must be more generic here
|
||||
function newUnexpectedFunction(i) {
|
||||
return new Error("unknown function at offset: " + i);
|
||||
}
|
||||
|
||||
function transform(ast) {
|
||||
var module;
|
||||
(0, _index.traverse)(ast, {
|
||||
Module: function (_Module) {
|
||||
function Module(_x) {
|
||||
return _Module.apply(this, arguments);
|
||||
}
|
||||
|
||||
Module.toString = function () {
|
||||
return _Module.toString();
|
||||
};
|
||||
|
||||
return Module;
|
||||
}(function (path) {
|
||||
module = path.node;
|
||||
})
|
||||
});
|
||||
var moduleContext = (0, _helperModuleContext.moduleContextFromModuleAST)(module); // Transform the actual instruction in function bodies
|
||||
|
||||
(0, _index.traverse)(ast, {
|
||||
Func: function (_Func) {
|
||||
function Func(_x2) {
|
||||
return _Func.apply(this, arguments);
|
||||
}
|
||||
|
||||
Func.toString = function () {
|
||||
return _Func.toString();
|
||||
};
|
||||
|
||||
return Func;
|
||||
}(function (path) {
|
||||
transformFuncPath(path, moduleContext);
|
||||
}),
|
||||
Start: function (_Start) {
|
||||
function Start(_x3) {
|
||||
return _Start.apply(this, arguments);
|
||||
}
|
||||
|
||||
Start.toString = function () {
|
||||
return _Start.toString();
|
||||
};
|
||||
|
||||
return Start;
|
||||
}(function (path) {
|
||||
var index = path.node.index;
|
||||
|
||||
if ((0, _index.isIdentifier)(index) === true) {
|
||||
var offsetInModule = moduleContext.getFunctionOffsetByIdentifier(index.value);
|
||||
|
||||
if (typeof offsetInModule === "undefined") {
|
||||
throw newUnexpectedFunction(index.value);
|
||||
} // Replace the index Identifier
|
||||
// $FlowIgnore: reference?
|
||||
|
||||
|
||||
path.node.index = (0, _index.numberLiteralFromRaw)(offsetInModule);
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
function transformFuncPath(funcPath, moduleContext) {
|
||||
var funcNode = funcPath.node;
|
||||
var signature = funcNode.signature;
|
||||
|
||||
if (signature.type !== "Signature") {
|
||||
throw new Error("Function signatures must be denormalised before execution");
|
||||
}
|
||||
|
||||
var params = signature.params; // Add func locals in the context
|
||||
|
||||
params.forEach(function (p) {
|
||||
return moduleContext.addLocal(p.valtype);
|
||||
});
|
||||
(0, _index.traverse)(funcNode, {
|
||||
Instr: function (_Instr) {
|
||||
function Instr(_x4) {
|
||||
return _Instr.apply(this, arguments);
|
||||
}
|
||||
|
||||
Instr.toString = function () {
|
||||
return _Instr.toString();
|
||||
};
|
||||
|
||||
return Instr;
|
||||
}(function (instrPath) {
|
||||
var instrNode = instrPath.node;
|
||||
/**
|
||||
* Local access
|
||||
*/
|
||||
|
||||
if (instrNode.id === "get_local" || instrNode.id === "set_local" || instrNode.id === "tee_local") {
|
||||
var _instrNode$args = _slicedToArray(instrNode.args, 1),
|
||||
firstArg = _instrNode$args[0];
|
||||
|
||||
if (firstArg.type === "Identifier") {
|
||||
var offsetInParams = params.findIndex(function (_ref) {
|
||||
var id = _ref.id;
|
||||
return id === firstArg.value;
|
||||
});
|
||||
|
||||
if (offsetInParams === -1) {
|
||||
throw new Error("".concat(firstArg.value, " not found in ").concat(instrNode.id, ": not declared in func params"));
|
||||
} // Replace the Identifer node by our new NumberLiteral node
|
||||
|
||||
|
||||
instrNode.args[0] = (0, _index.numberLiteralFromRaw)(offsetInParams);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Global access
|
||||
*/
|
||||
|
||||
|
||||
if (instrNode.id === "get_global" || instrNode.id === "set_global") {
|
||||
var _instrNode$args2 = _slicedToArray(instrNode.args, 1),
|
||||
_firstArg = _instrNode$args2[0];
|
||||
|
||||
if ((0, _index.isIdentifier)(_firstArg) === true) {
|
||||
var globalOffset = moduleContext.getGlobalOffsetByIdentifier( // $FlowIgnore: reference?
|
||||
_firstArg.value);
|
||||
|
||||
if (typeof globalOffset === "undefined") {
|
||||
// $FlowIgnore: reference?
|
||||
throw new Error("global ".concat(_firstArg.value, " not found in module"));
|
||||
} // Replace the Identifer node by our new NumberLiteral node
|
||||
|
||||
|
||||
instrNode.args[0] = (0, _index.numberLiteralFromRaw)(globalOffset);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Labels lookup
|
||||
*/
|
||||
|
||||
|
||||
if (instrNode.id === "br") {
|
||||
var _instrNode$args3 = _slicedToArray(instrNode.args, 1),
|
||||
_firstArg2 = _instrNode$args3[0];
|
||||
|
||||
if ((0, _index.isIdentifier)(_firstArg2) === true) {
|
||||
// if the labels is not found it is going to be replaced with -1
|
||||
// which is invalid.
|
||||
var relativeBlockCount = -1; // $FlowIgnore: reference?
|
||||
|
||||
instrPath.findParent(function (_ref2) {
|
||||
var node = _ref2.node;
|
||||
|
||||
if ((0, _index.isBlock)(node)) {
|
||||
relativeBlockCount++; // $FlowIgnore: reference?
|
||||
|
||||
var name = node.label || node.name;
|
||||
|
||||
if (_typeof(name) === "object") {
|
||||
// $FlowIgnore: isIdentifier ensures that
|
||||
if (name.value === _firstArg2.value) {
|
||||
// Found it
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if ((0, _index.isFunc)(node)) {
|
||||
return false;
|
||||
}
|
||||
}); // Replace the Identifer node by our new NumberLiteral node
|
||||
|
||||
instrNode.args[0] = (0, _index.numberLiteralFromRaw)(relativeBlockCount);
|
||||
}
|
||||
}
|
||||
}),
|
||||
|
||||
/**
|
||||
* Func lookup
|
||||
*/
|
||||
CallInstruction: function (_CallInstruction) {
|
||||
function CallInstruction(_x5) {
|
||||
return _CallInstruction.apply(this, arguments);
|
||||
}
|
||||
|
||||
CallInstruction.toString = function () {
|
||||
return _CallInstruction.toString();
|
||||
};
|
||||
|
||||
return CallInstruction;
|
||||
}(function (_ref3) {
|
||||
var node = _ref3.node;
|
||||
var index = node.index;
|
||||
|
||||
if ((0, _index.isIdentifier)(index) === true) {
|
||||
var offsetInModule = moduleContext.getFunctionOffsetByIdentifier(index.value);
|
||||
|
||||
if (typeof offsetInModule === "undefined") {
|
||||
throw newUnexpectedFunction(index.value);
|
||||
} // Replace the index Identifier
|
||||
// $FlowIgnore: reference?
|
||||
|
||||
|
||||
node.index = (0, _index.numberLiteralFromRaw)(offsetInModule);
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
105
node_modules/@webassemblyjs/ast/lib/traverse.js
generated
vendored
Normal file
105
node_modules/@webassemblyjs/ast/lib/traverse.js
generated
vendored
Normal file
@@ -0,0 +1,105 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.traverse = traverse;
|
||||
|
||||
var _nodePath = require("./node-path");
|
||||
|
||||
var _nodes = require("./nodes");
|
||||
|
||||
// recursively walks the AST starting at the given node. The callback is invoked for
|
||||
// and object that has a 'type' property.
|
||||
function walk(context, callback) {
|
||||
var stop = false;
|
||||
|
||||
function innerWalk(context, callback) {
|
||||
if (stop) {
|
||||
return;
|
||||
}
|
||||
|
||||
var node = context.node;
|
||||
|
||||
if (node === undefined) {
|
||||
console.warn("traversing with an empty context");
|
||||
return;
|
||||
}
|
||||
|
||||
if (node._deleted === true) {
|
||||
return;
|
||||
}
|
||||
|
||||
var path = (0, _nodePath.createPath)(context);
|
||||
callback(node.type, path);
|
||||
|
||||
if (path.shouldStop) {
|
||||
stop = true;
|
||||
return;
|
||||
}
|
||||
|
||||
Object.keys(node).forEach(function (prop) {
|
||||
var value = node[prop];
|
||||
|
||||
if (value === null || value === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
var valueAsArray = Array.isArray(value) ? value : [value];
|
||||
valueAsArray.forEach(function (childNode) {
|
||||
if (typeof childNode.type === "string") {
|
||||
var childContext = {
|
||||
node: childNode,
|
||||
parentKey: prop,
|
||||
parentPath: path,
|
||||
shouldStop: false,
|
||||
inList: Array.isArray(value)
|
||||
};
|
||||
innerWalk(childContext, callback);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
innerWalk(context, callback);
|
||||
}
|
||||
|
||||
var noop = function noop() {};
|
||||
|
||||
function traverse(node, visitors) {
|
||||
var before = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : noop;
|
||||
var after = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : noop;
|
||||
Object.keys(visitors).forEach(function (visitor) {
|
||||
if (!_nodes.nodeAndUnionTypes.includes(visitor)) {
|
||||
throw new Error("Unexpected visitor ".concat(visitor));
|
||||
}
|
||||
});
|
||||
var context = {
|
||||
node: node,
|
||||
inList: false,
|
||||
shouldStop: false,
|
||||
parentPath: null,
|
||||
parentKey: null
|
||||
};
|
||||
walk(context, function (type, path) {
|
||||
if (typeof visitors[type] === "function") {
|
||||
before(type, path);
|
||||
visitors[type](path);
|
||||
after(type, path);
|
||||
}
|
||||
|
||||
var unionTypes = _nodes.unionTypesMap[type];
|
||||
|
||||
if (!unionTypes) {
|
||||
throw new Error("Unexpected node type ".concat(type));
|
||||
}
|
||||
|
||||
unionTypes.forEach(function (unionType) {
|
||||
if (typeof visitors[unionType] === "function") {
|
||||
before(unionType, path);
|
||||
visitors[unionType](path);
|
||||
after(unionType, path);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
0
node_modules/@webassemblyjs/ast/lib/types/basic.js
generated
vendored
Normal file
0
node_modules/@webassemblyjs/ast/lib/types/basic.js
generated
vendored
Normal file
0
node_modules/@webassemblyjs/ast/lib/types/nodes.js
generated
vendored
Normal file
0
node_modules/@webassemblyjs/ast/lib/types/nodes.js
generated
vendored
Normal file
0
node_modules/@webassemblyjs/ast/lib/types/traverse.js
generated
vendored
Normal file
0
node_modules/@webassemblyjs/ast/lib/types/traverse.js
generated
vendored
Normal file
306
node_modules/@webassemblyjs/ast/lib/utils.js
generated
vendored
Normal file
306
node_modules/@webassemblyjs/ast/lib/utils.js
generated
vendored
Normal file
@@ -0,0 +1,306 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.isAnonymous = isAnonymous;
|
||||
exports.getSectionMetadata = getSectionMetadata;
|
||||
exports.getSectionMetadatas = getSectionMetadatas;
|
||||
exports.sortSectionMetadata = sortSectionMetadata;
|
||||
exports.orderedInsertNode = orderedInsertNode;
|
||||
exports.assertHasLoc = assertHasLoc;
|
||||
exports.getEndOfSection = getEndOfSection;
|
||||
exports.shiftLoc = shiftLoc;
|
||||
exports.shiftSection = shiftSection;
|
||||
exports.signatureForOpcode = signatureForOpcode;
|
||||
exports.getUniqueNameGenerator = getUniqueNameGenerator;
|
||||
exports.getStartByteOffset = getStartByteOffset;
|
||||
exports.getEndByteOffset = getEndByteOffset;
|
||||
exports.getFunctionBeginingByteOffset = getFunctionBeginingByteOffset;
|
||||
exports.getEndBlockByteOffset = getEndBlockByteOffset;
|
||||
exports.getStartBlockByteOffset = getStartBlockByteOffset;
|
||||
|
||||
var _signatures = require("./signatures");
|
||||
|
||||
var _traverse = require("./traverse");
|
||||
|
||||
var _helperWasmBytecode = _interopRequireWildcard(require("@webassemblyjs/helper-wasm-bytecode"));
|
||||
|
||||
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = Object.defineProperty && Object.getOwnPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : {}; if (desc.get || desc.set) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } } newObj.default = obj; return newObj; } }
|
||||
|
||||
function _sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
|
||||
|
||||
function _slicedToArray(arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return _sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }
|
||||
|
||||
function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
|
||||
|
||||
function isAnonymous(ident) {
|
||||
return ident.raw === "";
|
||||
}
|
||||
|
||||
function getSectionMetadata(ast, name) {
|
||||
var section;
|
||||
(0, _traverse.traverse)(ast, {
|
||||
SectionMetadata: function (_SectionMetadata) {
|
||||
function SectionMetadata(_x) {
|
||||
return _SectionMetadata.apply(this, arguments);
|
||||
}
|
||||
|
||||
SectionMetadata.toString = function () {
|
||||
return _SectionMetadata.toString();
|
||||
};
|
||||
|
||||
return SectionMetadata;
|
||||
}(function (_ref) {
|
||||
var node = _ref.node;
|
||||
|
||||
if (node.section === name) {
|
||||
section = node;
|
||||
}
|
||||
})
|
||||
});
|
||||
return section;
|
||||
}
|
||||
|
||||
function getSectionMetadatas(ast, name) {
|
||||
var sections = [];
|
||||
(0, _traverse.traverse)(ast, {
|
||||
SectionMetadata: function (_SectionMetadata2) {
|
||||
function SectionMetadata(_x2) {
|
||||
return _SectionMetadata2.apply(this, arguments);
|
||||
}
|
||||
|
||||
SectionMetadata.toString = function () {
|
||||
return _SectionMetadata2.toString();
|
||||
};
|
||||
|
||||
return SectionMetadata;
|
||||
}(function (_ref2) {
|
||||
var node = _ref2.node;
|
||||
|
||||
if (node.section === name) {
|
||||
sections.push(node);
|
||||
}
|
||||
})
|
||||
});
|
||||
return sections;
|
||||
}
|
||||
|
||||
function sortSectionMetadata(m) {
|
||||
if (m.metadata == null) {
|
||||
console.warn("sortSectionMetadata: no metadata to sort");
|
||||
return;
|
||||
} // $FlowIgnore
|
||||
|
||||
|
||||
m.metadata.sections.sort(function (a, b) {
|
||||
var aId = _helperWasmBytecode.default.sections[a.section];
|
||||
var bId = _helperWasmBytecode.default.sections[b.section];
|
||||
|
||||
if (typeof aId !== "number" || typeof bId !== "number") {
|
||||
throw new Error("Section id not found");
|
||||
}
|
||||
|
||||
return aId - bId;
|
||||
});
|
||||
}
|
||||
|
||||
function orderedInsertNode(m, n) {
|
||||
assertHasLoc(n);
|
||||
var didInsert = false;
|
||||
|
||||
if (n.type === "ModuleExport") {
|
||||
m.fields.push(n);
|
||||
return;
|
||||
}
|
||||
|
||||
m.fields = m.fields.reduce(function (acc, field) {
|
||||
var fieldEndCol = Infinity;
|
||||
|
||||
if (field.loc != null) {
|
||||
// $FlowIgnore
|
||||
fieldEndCol = field.loc.end.column;
|
||||
} // $FlowIgnore: assertHasLoc ensures that
|
||||
|
||||
|
||||
if (didInsert === false && n.loc.start.column < fieldEndCol) {
|
||||
didInsert = true;
|
||||
acc.push(n);
|
||||
}
|
||||
|
||||
acc.push(field);
|
||||
return acc;
|
||||
}, []); // Handles empty modules or n is the last element
|
||||
|
||||
if (didInsert === false) {
|
||||
m.fields.push(n);
|
||||
}
|
||||
}
|
||||
|
||||
function assertHasLoc(n) {
|
||||
if (n.loc == null || n.loc.start == null || n.loc.end == null) {
|
||||
throw new Error("Internal failure: node (".concat(JSON.stringify(n.type), ") has no location information"));
|
||||
}
|
||||
}
|
||||
|
||||
function getEndOfSection(s) {
|
||||
assertHasLoc(s.size);
|
||||
return s.startOffset + s.size.value + ( // $FlowIgnore
|
||||
s.size.loc.end.column - s.size.loc.start.column);
|
||||
}
|
||||
|
||||
function shiftLoc(node, delta) {
|
||||
// $FlowIgnore
|
||||
node.loc.start.column += delta; // $FlowIgnore
|
||||
|
||||
node.loc.end.column += delta;
|
||||
}
|
||||
|
||||
function shiftSection(ast, node, delta) {
|
||||
if (node.type !== "SectionMetadata") {
|
||||
throw new Error("Can not shift node " + JSON.stringify(node.type));
|
||||
}
|
||||
|
||||
node.startOffset += delta;
|
||||
|
||||
if (_typeof(node.size.loc) === "object") {
|
||||
shiftLoc(node.size, delta);
|
||||
} // Custom sections doesn't have vectorOfSize
|
||||
|
||||
|
||||
if (_typeof(node.vectorOfSize) === "object" && _typeof(node.vectorOfSize.loc) === "object") {
|
||||
shiftLoc(node.vectorOfSize, delta);
|
||||
}
|
||||
|
||||
var sectionName = node.section; // shift node locations within that section
|
||||
|
||||
(0, _traverse.traverse)(ast, {
|
||||
Node: function Node(_ref3) {
|
||||
var node = _ref3.node;
|
||||
var section = (0, _helperWasmBytecode.getSectionForNode)(node);
|
||||
|
||||
if (section === sectionName && _typeof(node.loc) === "object") {
|
||||
shiftLoc(node, delta);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function signatureForOpcode(object, name) {
|
||||
var opcodeName = name;
|
||||
|
||||
if (object !== undefined && object !== "") {
|
||||
opcodeName = object + "." + name;
|
||||
}
|
||||
|
||||
var sign = _signatures.signatures[opcodeName];
|
||||
|
||||
if (sign == undefined) {
|
||||
// TODO: Uncomment this when br_table and others has been done
|
||||
//throw new Error("Invalid opcode: "+opcodeName);
|
||||
return [object, object];
|
||||
}
|
||||
|
||||
return sign[0];
|
||||
}
|
||||
|
||||
function getUniqueNameGenerator() {
|
||||
var inc = {};
|
||||
return function () {
|
||||
var prefix = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : "temp";
|
||||
|
||||
if (!(prefix in inc)) {
|
||||
inc[prefix] = 0;
|
||||
} else {
|
||||
inc[prefix] = inc[prefix] + 1;
|
||||
}
|
||||
|
||||
return prefix + "_" + inc[prefix];
|
||||
};
|
||||
}
|
||||
|
||||
function getStartByteOffset(n) {
|
||||
// $FlowIgnore
|
||||
if (typeof n.loc === "undefined" || typeof n.loc.start === "undefined") {
|
||||
throw new Error( // $FlowIgnore
|
||||
"Can not get byte offset without loc informations, node: " + String(n.id));
|
||||
}
|
||||
|
||||
return n.loc.start.column;
|
||||
}
|
||||
|
||||
function getEndByteOffset(n) {
|
||||
// $FlowIgnore
|
||||
if (typeof n.loc === "undefined" || typeof n.loc.end === "undefined") {
|
||||
throw new Error("Can not get byte offset without loc informations, node: " + n.type);
|
||||
}
|
||||
|
||||
return n.loc.end.column;
|
||||
}
|
||||
|
||||
function getFunctionBeginingByteOffset(n) {
|
||||
if (!(n.body.length > 0)) {
|
||||
throw new Error('n.body.length > 0' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
var _n$body = _slicedToArray(n.body, 1),
|
||||
firstInstruction = _n$body[0];
|
||||
|
||||
return getStartByteOffset(firstInstruction);
|
||||
}
|
||||
|
||||
function getEndBlockByteOffset(n) {
|
||||
// $FlowIgnore
|
||||
if (!(n.instr.length > 0 || n.body.length > 0)) {
|
||||
throw new Error('n.instr.length > 0 || n.body.length > 0' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
var lastInstruction;
|
||||
|
||||
if (n.instr) {
|
||||
// $FlowIgnore
|
||||
lastInstruction = n.instr[n.instr.length - 1];
|
||||
}
|
||||
|
||||
if (n.body) {
|
||||
// $FlowIgnore
|
||||
lastInstruction = n.body[n.body.length - 1];
|
||||
}
|
||||
|
||||
if (!(_typeof(lastInstruction) === "object")) {
|
||||
throw new Error('typeof lastInstruction === "object"' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
// $FlowIgnore
|
||||
return getStartByteOffset(lastInstruction);
|
||||
}
|
||||
|
||||
function getStartBlockByteOffset(n) {
|
||||
// $FlowIgnore
|
||||
if (!(n.instr.length > 0 || n.body.length > 0)) {
|
||||
throw new Error('n.instr.length > 0 || n.body.length > 0' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
var fistInstruction;
|
||||
|
||||
if (n.instr) {
|
||||
// $FlowIgnore
|
||||
var _n$instr = _slicedToArray(n.instr, 1);
|
||||
|
||||
fistInstruction = _n$instr[0];
|
||||
}
|
||||
|
||||
if (n.body) {
|
||||
// $FlowIgnore
|
||||
var _n$body2 = _slicedToArray(n.body, 1);
|
||||
|
||||
fistInstruction = _n$body2[0];
|
||||
}
|
||||
|
||||
if (!(_typeof(fistInstruction) === "object")) {
|
||||
throw new Error('typeof fistInstruction === "object"' + " error: " + (undefined || "unknown"));
|
||||
}
|
||||
|
||||
// $FlowIgnore
|
||||
return getStartByteOffset(fistInstruction);
|
||||
}
|
||||
33
node_modules/@webassemblyjs/ast/package.json
generated
vendored
Normal file
33
node_modules/@webassemblyjs/ast/package.json
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
{
|
||||
"name": "@webassemblyjs/ast",
|
||||
"version": "1.9.0",
|
||||
"description": "AST utils for webassemblyjs",
|
||||
"keywords": [
|
||||
"webassembly",
|
||||
"javascript",
|
||||
"ast"
|
||||
],
|
||||
"main": "lib/index.js",
|
||||
"module": "esm/index.js",
|
||||
"author": "Sven Sauleau",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@webassemblyjs/helper-module-context": "1.9.0",
|
||||
"@webassemblyjs/helper-wasm-bytecode": "1.9.0",
|
||||
"@webassemblyjs/wast-parser": "1.9.0"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/xtuc/webassemblyjs.git"
|
||||
},
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@webassemblyjs/helper-test-framework": "1.9.0",
|
||||
"array.prototype.flatmap": "^1.2.1",
|
||||
"dump-exports": "^0.1.0",
|
||||
"mamacro": "^0.0.7"
|
||||
},
|
||||
"gitHead": "0440b420888c1f7701eb9762ec657775506b87d8"
|
||||
}
|
||||
219
node_modules/@webassemblyjs/ast/scripts/generateNodeUtils.js
generated
vendored
Normal file
219
node_modules/@webassemblyjs/ast/scripts/generateNodeUtils.js
generated
vendored
Normal file
@@ -0,0 +1,219 @@
|
||||
const definitions = require("../src/definitions");
|
||||
const flatMap = require("array.prototype.flatmap");
|
||||
const {
|
||||
typeSignature,
|
||||
iterateProps,
|
||||
mapProps,
|
||||
filterProps,
|
||||
unique
|
||||
} = require("./util");
|
||||
|
||||
const stdout = process.stdout;
|
||||
|
||||
const jsTypes = ["string", "number", "boolean"];
|
||||
|
||||
const quote = value => `"${value}"`;
|
||||
|
||||
function params(fields) {
|
||||
const optionalDefault = field => (field.default ? ` = ${field.default}` : "");
|
||||
return mapProps(fields)
|
||||
.map(field => `${typeSignature(field)}${optionalDefault(field)}`)
|
||||
.join(",");
|
||||
}
|
||||
|
||||
function assertParamType({ assertNodeType, array, name, type }) {
|
||||
if (array) {
|
||||
// TODO - assert contents of array?
|
||||
return `assert(typeof ${name} === "object" && typeof ${name}.length !== "undefined")\n`;
|
||||
} else {
|
||||
if (jsTypes.includes(type)) {
|
||||
return `assert(
|
||||
typeof ${name} === "${type}",
|
||||
"Argument ${name} must be of type ${type}, given: " + typeof ${name}
|
||||
)`;
|
||||
}
|
||||
|
||||
if (assertNodeType === true) {
|
||||
return `assert(
|
||||
${name}.type === "${type}",
|
||||
"Argument ${name} must be of type ${type}, given: " + ${name}.type
|
||||
)`;
|
||||
}
|
||||
|
||||
return "";
|
||||
}
|
||||
}
|
||||
|
||||
function assertParam(meta) {
|
||||
const paramAssertion = assertParamType(meta);
|
||||
|
||||
if (paramAssertion === "") {
|
||||
return "";
|
||||
}
|
||||
|
||||
if (meta.maybe || meta.optional) {
|
||||
return `
|
||||
if (${meta.name} !== null && ${meta.name} !== undefined) {
|
||||
${paramAssertion};
|
||||
}
|
||||
`;
|
||||
} else {
|
||||
return paramAssertion;
|
||||
}
|
||||
}
|
||||
|
||||
function assertParams(fields) {
|
||||
return mapProps(fields)
|
||||
.map(assertParam)
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
function buildObject(typeDef) {
|
||||
const optionalField = meta => {
|
||||
if (meta.array) {
|
||||
// omit optional array properties if the constructor function was supplied
|
||||
// with an empty array
|
||||
return `
|
||||
if (typeof ${meta.name} !== "undefined" && ${meta.name}.length > 0) {
|
||||
node.${meta.name} = ${meta.name};
|
||||
}
|
||||
`;
|
||||
} else if (meta.type === "Object") {
|
||||
// omit optional object properties if they have no keys
|
||||
return `
|
||||
if (typeof ${meta.name} !== "undefined" && Object.keys(${
|
||||
meta.name
|
||||
}).length !== 0) {
|
||||
node.${meta.name} = ${meta.name};
|
||||
}
|
||||
`;
|
||||
} else if (meta.type === "boolean") {
|
||||
// omit optional boolean properties if they are not true
|
||||
return `
|
||||
if (${meta.name} === true) {
|
||||
node.${meta.name} = true;
|
||||
}
|
||||
`;
|
||||
} else {
|
||||
return `
|
||||
if (typeof ${meta.name} !== "undefined") {
|
||||
node.${meta.name} = ${meta.name};
|
||||
}
|
||||
`;
|
||||
}
|
||||
};
|
||||
|
||||
const fields = mapProps(typeDef.fields)
|
||||
.filter(f => !f.optional && !f.constant)
|
||||
.map(f => f.name);
|
||||
|
||||
const constants = mapProps(typeDef.fields)
|
||||
.filter(f => f.constant)
|
||||
.map(f => `${f.name}: "${f.value}"`);
|
||||
|
||||
return `
|
||||
const node: ${typeDef.flowTypeName || typeDef.name} = {
|
||||
type: "${typeDef.name}",
|
||||
${constants.concat(fields).join(",")}
|
||||
}
|
||||
|
||||
${mapProps(typeDef.fields)
|
||||
.filter(f => f.optional)
|
||||
.map(optionalField)
|
||||
.join("")}
|
||||
`;
|
||||
}
|
||||
|
||||
function lowerCamelCase(name) {
|
||||
return name.substring(0, 1).toLowerCase() + name.substring(1);
|
||||
}
|
||||
|
||||
/**
 * Emits the autogenerated AST helper module to stdout: one builder function
 * per node type, `isX` type testers, union-type testers, `assertX`
 * assertions, plus the `unionTypesMap` and `nodeAndUnionTypes` tables.
 *
 * Relies on `definitions` and on helpers (`params`, `assertParams`,
 * `buildObject`, `quote`, `filterProps`, `mapProps`, `iterateProps`,
 * `flatMap`, `unique`, `stdout`) declared elsewhere in this script —
 * NOTE(review): their exact output shapes are not visible here; confirm
 * against the rest of scripts/generateNodeUtils.js.
 */
function generate() {
  // Header of the generated module: Flow pragma plus two tiny runtime
  // helpers shared by all the emitted testers/assertions below.
  stdout.write(`
// @flow

// THIS FILE IS AUTOGENERATED
// see scripts/generateNodeUtils.js

import { assert } from "mamacro";

function isTypeOf(t: string) {
  return (n: Node) => n.type === t;
}

function assertTypeOf(t: string) {
  return (n: Node) => assert(n.type === t);
}
`);

  // Node builders
  // One factory per node type; fields marked `constant` are excluded from
  // the parameter list (filterProps with !f.constant). The emitted body
  // expects `buildObject` to have produced a local `node` binding.
  iterateProps(definitions, typeDefinition => {
    stdout.write(`
    export function ${lowerCamelCase(typeDefinition.name)} (
      ${params(filterProps(typeDefinition.fields, f => !f.constant))}
    ): ${typeDefinition.name} {

      ${assertParams(filterProps(typeDefinition.fields, f => !f.constant))}
      ${buildObject(typeDefinition)}

      return node;
    }
  `);
  });

  // Node testers
  // `isFoo(node)` predicates, one per concrete node type.
  iterateProps(definitions, typeDefinition => {
    stdout.write(`
    export const is${typeDefinition.name} =
      isTypeOf("${typeDefinition.name}");
  `);
  });

  // Node union type testers
  // A union type's tester is the OR of the testers of its member types.
  const unionTypes = unique(
    flatMap(mapProps(definitions).filter(d => d.unionType), d => d.unionType)
  );
  unionTypes.forEach(unionType => {
    stdout.write(
      `
    export const is${unionType} = (node: Node) => ` +
        mapProps(definitions)
          .filter(d => d.unionType && d.unionType.includes(unionType))
          .map(d => `is${d.name}(node) `)
          .join("||") +
        ";\n\n"
    );
  });

  // Node assertion
  // `assertFoo(node)` throws (via the generated assertTypeOf) on mismatch.
  iterateProps(definitions, typeDefinition => {
    stdout.write(`
    export const assert${typeDefinition.name} =
      assertTypeOf("${typeDefinition.name}");
  `);
  });

  // a map from node type to its set of union types
  // NOTE: the array produced by .map() is string-concatenated here, so the
  // entries are joined with "," by Array#toString.
  stdout.write(
    `
    export const unionTypesMap = {` +
      mapProps(definitions)
        .filter(d => d.unionType)
        .map(t => `"${t.name}": [${t.unionType.map(quote).join(",")}]\n`) +
      `};
  `
  );

  // an array of all node and union types
  stdout.write(
    `
    export const nodeAndUnionTypes = [` +
      mapProps(definitions)
        .map(t => `"${t.name}"`)
        .concat(unionTypes.map(quote))
        .join(",") +
      `];`
  );
}
|
||||
|
||||
// Entry point: write the generated module to stdout when this script runs.
generate();
|
||||
47
node_modules/@webassemblyjs/ast/scripts/generateTypeDefinitions.js
generated
vendored
Normal file
47
node_modules/@webassemblyjs/ast/scripts/generateTypeDefinitions.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
const definitions = require("../src/definitions");
|
||||
const flatMap = require("array.prototype.flatmap");
|
||||
const { typeSignature, mapProps, iterateProps, unique } = require("./util");
|
||||
|
||||
const stdout = process.stdout;
|
||||
|
||||
/**
 * Renders a field map as a comma-separated list of Flow type signatures.
 *
 * @param {Object} fields - map of field name to field metadata
 * @returns {string} e.g. "id: string,body: Array<Instruction>"
 */
function params(fields) {
  const signatures = mapProps(fields).map(typeSignature);
  return signatures.join(",");
}
|
||||
|
||||
/**
 * Emits the autogenerated Flow type-definition file to stdout: one union
 * type alias per union, then one object type per node definition.
 *
 * Relies on `definitions`, `stdout` and helpers (`params`, `mapProps`,
 * `iterateProps`, `flatMap`, `unique`) declared elsewhere in this script.
 * NOTE(review): the generated types extend a `BaseNode` assumed to be
 * declared in the consuming codebase — confirm.
 */
function generate() {
  // Header of the generated file.
  stdout.write(`
// @flow
/* eslint no-unused-vars: off */

// THIS FILE IS AUTOGENERATED
// see scripts/generateTypeDefinitions.js
`);

  // generate union types
  // Each union alias is the "|"-join of the node types that list it.
  const unionTypes = unique(
    flatMap(mapProps(definitions).filter(d => d.unionType), d => d.unionType)
  );
  unionTypes.forEach(unionType => {
    stdout.write(
      `type ${unionType} = ` +
        mapProps(definitions)
          .filter(d => d.unionType && d.unionType.includes(unionType))
          .map(d => d.name)
          .join("|") +
        ";\n\n"
    );
  });

  // generate the type definitions
  // Every node type spreads BaseNode and pins its `type` discriminant.
  iterateProps(definitions, typeDef => {
    stdout.write(`type ${typeDef.name} = {
  ...BaseNode,
  type: "${typeDef.name}",
  ${params(typeDef.fields)}
};\n\n`);
  });
}
|
||||
|
||||
// Entry point: write the generated type definitions to stdout.
generate();
|
||||
38
node_modules/@webassemblyjs/ast/scripts/util.js
generated
vendored
Normal file
38
node_modules/@webassemblyjs/ast/scripts/util.js
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
/**
 * Visits every own enumerable entry of `obj`, calling `iterator` with a
 * shallow copy of the value augmented with a `name` property set to the key.
 *
 * @param {Object} obj - map of name to definition
 * @param {Function} iterator - called once per entry
 */
function iterateProps(obj, iterator) {
  for (const [key, value] of Object.entries(obj)) {
    iterator({ ...value, name: key });
  }
}
|
||||
|
||||
/**
 * Converts a name->definition map into an array of definitions, each a
 * shallow copy carrying its key as a `name` property.
 *
 * @param {Object} obj - map of name to definition
 * @returns {Array<Object>} definitions in key-insertion order
 */
function mapProps(obj) {
  return Object.entries(obj).map(([name, value]) => ({ ...value, name }));
}
|
||||
|
||||
/**
 * Returns a new object keeping only the entries of `obj` whose value
 * satisfies `filter`. Kept values are the original references (no copy);
 * key order follows the original insertion order.
 *
 * @param {Object} obj - map of name to definition
 * @param {Function} filter - predicate over values
 * @returns {Object} filtered map
 */
function filterProps(obj, filter) {
  return Object.fromEntries(
    Object.entries(obj).filter(([, value]) => filter(value))
  );
}
|
||||
|
||||
/**
 * Renders one field's Flow type signature from its metadata.
 * - `array: true`  wraps the type in Array<...>
 * - `optional`     emits `name?: T`
 * - `maybe`        emits `name: ?T`
 *
 * @param {Object} meta - { name, type, array?, optional?, maybe? }
 * @returns {string} the Flow signature, e.g. "body?: Array<Instruction>"
 */
function typeSignature(meta) {
  const base = meta.array ? `Array<${meta.type}>` : meta.type;
  if (meta.optional) {
    return `${meta.name}?: ${base}`;
  }
  if (meta.maybe) {
    return `${meta.name}: ?${base}`;
  }
  return `${meta.name}: ${base}`;
}
|
||||
|
||||
// Deduplicates an iterable, preserving first-seen order.
const unique = (items) => [...new Set(items)];
|
||||
|
||||
module.exports = {
|
||||
iterateProps,
|
||||
mapProps,
|
||||
filterProps,
|
||||
typeSignature,
|
||||
unique
|
||||
};
|
||||
21
node_modules/@webassemblyjs/floating-point-hex-parser/LICENSE
generated
vendored
Normal file
21
node_modules/@webassemblyjs/floating-point-hex-parser/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2017 Mauro Bringolf
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
34
node_modules/@webassemblyjs/floating-point-hex-parser/README.md
generated
vendored
Normal file
34
node_modules/@webassemblyjs/floating-point-hex-parser/README.md
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
# Parser function for floating point hexadecimals
|
||||
|
||||
[]()
|
||||
[]()
|
||||
[]()
|
||||
|
||||
> A JavaScript function to parse floating point hexadecimals as defined by the [WebAssembly specification](https://webassembly.github.io/spec/core/text/values.html#text-hexfloat).
|
||||
|
||||
## Usage
|
||||
|
||||
```javascript
|
||||
import parseHexFloat from '@webassemblyjs/floating-point-hex-parser'
|
||||
|
||||
parseHexFloat('0x1p-1') // 0.5
|
||||
parseHexFloat('0x1.921fb54442d18p+2') // 6.283185307179586
|
||||
```
|
||||
|
||||
## Tests
|
||||
|
||||
This module is tested in two ways. The first one is through a small set of test cases that can be found in [test/regular.test.js](https://github.com/maurobringolf/@webassemblyjs/floating-point-hex-parser/blob/master/test/regular.test.js). The second one is non-deterministic (sometimes called *fuzzing*):
|
||||
|
||||
1. Generate a random IEEE754 double precision value `x`.
|
||||
1. Compute its representation `y` in floating point hexadecimal format using the C standard library function `printf` since C supports this format.
|
||||
1. Give both values to JS testcase and see if `parseHexFloat(y) === x`.
|
||||
|
||||
By default, one `npm test` run tests 100 random samples. If you want to do more, set the environment variable `FUZZ_AMOUNT` to the number of runs you'd like. Because it uses one child process for each sample, this mode is quite slow. For more details about the randomized tests, see [the source](https://github.com/maurobringolf/@webassemblyjs/floating-point-hex-parser/tree/master/test/fuzzing).
|
||||
|
||||
## Links
|
||||
|
||||
* [maurobringolf.ch/2017/12/hexadecimal-floating-point-notation/](https://maurobringolf.ch/2017/12/hexadecimal-floating-point-notation/)
|
||||
|
||||
* [github.com/xtuc/js-webassembly-interpreter/issues/32](https://github.com/xtuc/js-webassembly-interpreter/issues/32)
|
||||
|
||||
* [github.com/WebAssembly/design/issues/292](https://github.com/WebAssembly/design/issues/292)
|
||||
42
node_modules/@webassemblyjs/floating-point-hex-parser/esm/index.js
generated
vendored
Normal file
42
node_modules/@webassemblyjs/floating-point-hex-parser/esm/index.js
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
/**
 * Parses a WebAssembly floating-point hexadecimal literal, e.g.
 * "0x1p-1" -> 0.5 or "0x1.921fb54442d18p+2" -> 6.283185307179586.
 *
 * @param {string} input - hex-float text (case-insensitive, optional
 *   "p"/"P" binary exponent, optional fractional part after ".")
 * @returns {number} the parsed value (NaN for unparseable digits)
 */
export default function parse(input) {
  const text = input.toUpperCase();
  const pIndex = text.indexOf("P");
  const hasExponent = pIndex !== -1;

  // Split off the binary exponent, if any; the exponent is decimal.
  const digits = hasExponent ? text.substring(0, pIndex) : text;
  const exponent = hasExponent ? parseInt(text.substring(pIndex + 1)) : 0;

  const dotIndex = digits.indexOf(".");
  let value;

  if (dotIndex === -1) {
    // No fractional part: plain hex integer (parseInt accepts the 0X prefix
    // and a leading sign when given radix 16).
    value = parseInt(digits, 16);
  } else {
    let whole = parseInt(digits.substring(0, dotIndex), 16);
    // Math.sign yields -0 for "-0", which is how the sign of a negative
    // zero integer part survives the arithmetic below.
    const sign = Math.sign(whole);
    whole = sign * whole; // absolute value

    const fracDigitCount = digits.length - dotIndex - 1;
    const fracInt = parseInt(digits.substring(dotIndex + 1), 16);
    const fraction =
      fracDigitCount > 0 ? fracInt / Math.pow(16, fracDigitCount) : 0;

    if (sign === 0) {
      // Integer part is 0 or -0 (the comparison matches both).
      if (fraction === 0) {
        value = sign; // preserve signed zero
      } else {
        value = Object.is(sign, -0) ? -fraction : fraction;
      }
    } else {
      value = sign * (whole + fraction);
    }
  }

  return value * (hasExponent ? Math.pow(2, exponent) : 1);
}
|
||||
49
node_modules/@webassemblyjs/floating-point-hex-parser/lib/index.js
generated
vendored
Normal file
49
node_modules/@webassemblyjs/floating-point-hex-parser/lib/index.js
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = parse;
|
||||
|
||||
/**
 * Parses a WebAssembly floating-point hexadecimal literal, e.g.
 * "0x1p-1" -> 0.5 or "0x1.921fb54442d18p+2" -> 6.283185307179586.
 * (CommonJS build; mirrors esm/index.js.)
 *
 * @param {string} input - hex-float text (case-insensitive, optional
 *   "p"/"P" binary exponent, optional fractional part after ".")
 * @returns {number} the parsed value (NaN for unparseable digits)
 */
function parse(input) {
  const text = input.toUpperCase();
  const pIndex = text.indexOf("P");
  const hasExponent = pIndex !== -1;

  // Split off the binary exponent, if any; the exponent is decimal.
  const digits = hasExponent ? text.substring(0, pIndex) : text;
  const exponent = hasExponent ? parseInt(text.substring(pIndex + 1)) : 0;

  const dotIndex = digits.indexOf(".");
  let value;

  if (dotIndex === -1) {
    // No fractional part: plain hex integer (parseInt accepts the 0X prefix
    // and a leading sign when given radix 16).
    value = parseInt(digits, 16);
  } else {
    let whole = parseInt(digits.substring(0, dotIndex), 16);
    // Math.sign yields -0 for "-0", which is how the sign of a negative
    // zero integer part survives the arithmetic below.
    const sign = Math.sign(whole);
    whole = sign * whole; // absolute value

    const fracDigitCount = digits.length - dotIndex - 1;
    const fracInt = parseInt(digits.substring(dotIndex + 1), 16);
    const fraction =
      fracDigitCount > 0 ? fracInt / Math.pow(16, fracDigitCount) : 0;

    if (sign === 0) {
      // Integer part is 0 or -0 (the comparison matches both).
      if (fraction === 0) {
        value = sign; // preserve signed zero
      } else {
        value = Object.is(sign, -0) ? -fraction : fraction;
      }
    } else {
      value = sign * (whole + fraction);
    }
  }

  return value * (hasExponent ? Math.pow(2, exponent) : 1);
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user