Compare commits

..

15 Commits

Author SHA1 Message Date
jhutchings1
526f19341b Update codeql.ts 2020-07-11 13:28:32 -07:00
jhutchings1
ea8c11c723 Update codeql.ts 2020-07-11 13:22:57 -07:00
jhutchings1
df49e473c8 Update codeql.ts 2020-07-11 13:17:46 -07:00
jhutchings1
8d49f67955 Remove try/catch 2020-07-11 13:12:15 -07:00
jhutchings1
dc2f0c2f5f Debug 2020-07-11 13:08:34 -07:00
jhutchings1
5767c8ebfc Update codeql.ts 2020-07-11 12:59:51 -07:00
jhutchings1
fde574ae22 Update codeql.ts 2020-07-11 12:54:20 -07:00
jhutchings1
3b632c3106 Try listener option 2020-07-11 12:51:26 -07:00
jhutchings1
b63d497b0e Test 2020-07-11 12:43:17 -07:00
jhutchings1
f1569d58df Try additional debugging 2020-07-11 12:20:52 -07:00
jhutchings1
f2c8606b17 Add debug statements 2020-07-11 12:18:01 -07:00
jhutchings1
066d9716c6 Fix issue with error detection 2020-07-11 12:13:09 -07:00
jhutchings1
642850bff5 Add special case for "No code seen" error 2020-07-11 12:08:45 -07:00
jhutchings1
5fa6bd2622 Update autobuild and analysis path error messages. 2020-07-11 11:45:36 -07:00
jhutchings1
278f870907 Add help URL to languages error 2020-07-11 11:40:35 -07:00
3454 changed files with 3647 additions and 453492 deletions

View File

@@ -1,18 +0,0 @@
name: "CodeScanning CLI"
on: [push, pull_request]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
# Build the CLI
- name: Build CLI
run: npm run build-cli
# Upload an empty SARIF file
- name: Upload with CLI
run: node cli/code-scanning-cli.js upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_API_URL --github-auth ${{ github.token }}

View File

@@ -22,7 +22,7 @@ jobs:
env:
TEST_MODE: true
- run: |
cd "$RUNNER_TEMP/codeql_databases"
cd "$CODEQL_ACTION_DATABASE_DIR"
# List all directories as there will be precisely one directory per database
# but there may be other files in this directory such as query suites.
if [ "$(ls -d */ | wc -l)" != 6 ] || \

2
.gitignore vendored
View File

@@ -1,2 +0,0 @@
/cli/

25
.vscode/launch.json vendored
View File

@@ -1,25 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "Debug AVA test file",
"runtimeExecutable": "${workspaceFolder}/node_modules/.bin/ava",
"runtimeArgs": [
"${file}",
"--break",
"--serial",
"--timeout=20m"
],
"port": 9229,
"outputCapture": "std",
"skipFiles": [
"<node_internals>/**/*.js"
]
}
]
}

View File

@@ -1,4 +1,4 @@
# Contributing
## Contributing
[fork]: https://github.com/github/codeql-action/fork
[pr]: https://github.com/github/codeql-action/compare
@@ -20,31 +20,10 @@ Before you start, ensure that you have a recent version of node installed. You c
* Run tests: `npm run test`. You'll need to ensure that the JavaScript files are up-to-date first by running the command above.
* Run the linter: `npm run lint`.
This project also includes configuration to run tests from VSCode (with support for breakpoints) - open the test file you wish to run and choose "Debug AVA test file" from the Run menu in the Run panel.
### Running the action
To see the effect of your changes and to test them, push your changes in a branch and then look at the [Actions output](https://github.com/github/codeql-action/actions) for that branch. You can also exercise the code locally by running the automated tests.
### Running the action locally
It is possible to run this action locally via [act](https://github.com/nektos/act) by following these steps:
1. Create a GitHub [Personal Access Token](https://github.com/settings/tokens) (PAT).
1. Install [act](https://github.com/nektos/act) v0.2.10 or greater.
1. Add a `.env` file in the root of the project you are running:
```bash
CODEQL_LOCAL_RUN=true
# Optional, for better logging
GITHUB_JOB=<ANY_JOB_NAME>
```
1. Run `act -j codeql -s GITHUB_TOKEN=<PAT>`
Running locally will generate the CodeQL database and run all the queries, but it will avoid uploading and reporting results to GitHub. Note that this must be done on a repository that _consumes_ this action, not this repository. The use case is to debug failures of this action on specific repositories.
### Integration tests
As well as the unit tests (see _Common tasks_ above), there are integration tests, defined in `.github/workflows/integration-testing.yml`. These are run by a CI check. Depending on the change you're making, you may want to add a test to this file or extend an existing one.
@@ -56,7 +35,6 @@ As well as the unit tests (see _Common tasks_ above), there are integration test
3. Make your change, add tests, and make sure the tests still pass
4. Push to your fork and [submit a pull request][pr]
5. Pat yourself on the back and wait for your pull request to be reviewed and merged.
If you're a GitHub staff member, you can merge your own PR once it's approved; for external contributors, GitHub staff will merge your PR once it's approved.
Here are a few things you can do that will increase the likelihood of your pull request being accepted:

View File

@@ -19,10 +19,7 @@ inputs:
threads:
description: The number of threads to be used by CodeQL.
required: false
checkout_path:
description: "The path at which the analyzed repository was checked out. Used to relativeize any absolute paths in the uploaded SARIF file."
required: false
default: ${{ github.workspace }}
default: "1"
token:
default: ${{ github.token }}
matrix:

View File

@@ -5,7 +5,7 @@ inputs:
tools:
description: URL of CodeQL tools
required: false
# If not specified the Action will check in several places until it finds the CodeQL tools.
default: https://github.com/github/codeql-action/releases/download/codeql-bundle-20200630/codeql-bundle.tar.gz
languages:
description: The languages to be analysed
required: false

4
lib/analysis-paths.js generated
View File

@@ -23,7 +23,7 @@ function buildIncludeExcludeEnvVar(paths) {
}
return paths.join('\n');
}
function includeAndExcludeAnalysisPaths(config) {
function includeAndExcludeAnalysisPaths(config, languages) {
// The 'LGTM_INDEX_INCLUDE' and 'LGTM_INDEX_EXCLUDE' environment variables
// control which files/directories are traversed when scanning.
// This allows including files that otherwise would not be scanned, or
@@ -51,7 +51,7 @@ function includeAndExcludeAnalysisPaths(config) {
if ((config.paths.length !== 0 ||
config.pathsIgnore.length !== 0 ||
filters.length !== 0) &&
!config.languages.every(isInterpretedLanguage)) {
!languages.every(isInterpretedLanguage)) {
core.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
}
}
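One side of the hunk above passes the language list to `includeAndExcludeAnalysisPaths` as a second parameter instead of reading it from the config object. A minimal sketch of that call shape and the environment variables it populates, assuming the TypeScript sources (`src/analysis-paths.ts`, `src/config-utils.ts`) that these generated files are compiled from; the paths used here are invented for illustration:

```typescript
import * as analysisPaths from './analysis-paths';  // assumed relative imports
import * as configUtils from './config-utils';

const config = new configUtils.Config();
config.paths.push('src');          // becomes an LGTM_INDEX_INCLUDE entry
config.pathsIgnore.push('test');   // becomes an LGTM_INDEX_EXCLUDE entry

// Languages are passed explicitly; for non-interpreted languages the
// paths / paths-ignore settings only produce the warning shown above.
analysisPaths.includeAndExcludeAnalysisPaths(config, ['javascript']);
// process.env.LGTM_INDEX_INCLUDE === 'src'
// process.env.LGTM_INDEX_EXCLUDE === 'test'
// process.env.LGTM_INDEX_FILTERS === 'include:src\nexclude:test'
```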

View File

@@ -1 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEjD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACrE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;KACpF;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC;KAC1F;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IACvD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IAC7D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC/D;IAED,oEAAoE;IACpE,sEAAsE;IACtE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;QACxB,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC;QAC/B,OAAO,CAAC,MAAM,KAAK,CAAC,CAAC;QACvB,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAClD,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC5G;AACH,CAAC;AAjCD,wEAiCC"}
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEjD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACrE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,8BAA8B,CAAC,MAA0B,EAAE,SAAmB;IAC5F,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;KACpF;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC;KAC1F;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IACvD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC;IAC7D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC/D;IAED,oEAAoE;IACpE,sEAAsE;IACtE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;QACxB,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC;QAC/B,OAAO,CAAC,MAAM,KAAK,CAAC,CAAC;QACvB,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAC3C,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC5G;AACH,CAAC;AAjCD,wEAiCC"}

View File

@@ -12,43 +12,23 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const analysisPaths = __importStar(require("./analysis-paths"));
const configUtils = __importStar(require("./config-utils"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default("emptyPaths", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const config = {
languages: [],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: '',
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
t.is(process.env['LGTM_INDEX_FILTERS'], undefined);
});
let config = new configUtils.Config();
analysisPaths.includeAndExcludeAnalysisPaths(config, []);
t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
t.is(process.env['LGTM_INDEX_FILTERS'], undefined);
});
ava_1.default("nonEmptyPaths", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const config = {
languages: [],
queries: {},
paths: ['path1', 'path2', '**/path3'],
pathsIgnore: ['path4', 'path5', 'path6/**'],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: '',
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path4\npath5');
t.is(process.env['LGTM_INDEX_FILTERS'], 'include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**');
});
let config = new configUtils.Config();
config.paths.push('path1', 'path2', '**/path3');
config.pathsIgnore.push('path4', 'path5', 'path6/**');
analysisPaths.includeAndExcludeAnalysisPaths(config, []);
t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path4\npath5');
t.is(process.env['LGTM_INDEX_FILTERS'], 'include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**');
});
//# sourceMappingURL=analysis-paths.test.js.map

View File

@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC3B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAC1C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAC1C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,gGAAgG,CAAC,CAAC;IAC5I,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,4DAA8C;AAC9C,mDAA2C;AAE3C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC3B,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;IAChD,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;IACtD,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,gGAAgG,CAAC,CAAC;AAC5I,CAAC,CAAC,CAAC"}

28
lib/api-client.js generated
View File

@@ -13,34 +13,10 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const github = __importStar(require("@actions/github"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const util_1 = require("./util");
exports.getApiClient = function (githubAuth, githubApiUrl, allowLocalRun = false) {
if (util_1.isLocalRun() && !allowLocalRun) {
throw new Error('Invalid API call in local run');
}
return new github.GitHub({
auth: parseAuth(githubAuth),
baseUrl: githubApiUrl,
exports.getApiClient = function () {
return new github.GitHub(core.getInput('token'), {
userAgent: "CodeQL Action",
log: console_log_level_1.default({ level: "debug" })
});
};
// Parses the user input as either a single token,
// or a username and password / PAT.
function parseAuth(auth) {
// Check if it's a username:password pair
const c = auth.indexOf(':');
if (c !== -1) {
return 'basic ' + Buffer.from(auth).toString('base64');
}
// Otherwise use the token as it is
return auth;
}
// Temporary function to aid in the transition to running on and off of github actions.
// Once all code has been coverted this function should be removed or made canonical
// and called only from the action entrypoints.
function getActionsApiClient(allowLocalRun = false) {
return exports.getApiClient(core.getInput('token'), util_1.getRequiredEnvParam('GITHUB_API_URL'), allowLocalRun);
}
exports.getActionsApiClient = getActionsApiClient;
//# sourceMappingURL=api-client.js.map
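The `parseAuth` helper shown in the hunk above accepts either a bare token or a `username:token` pair, turning the latter into a basic-auth value. A condensed sketch of that behaviour; the credentials are placeholders:

```typescript
// Placeholder credentials; condensed from the parseAuth logic shown above.
const parseAuth = (auth: string): string =>
  auth.includes(':')
    ? 'basic ' + Buffer.from(auth).toString('base64') // username:password / PAT pair
    : auth;                                           // already a token, use as-is

parseAuth('octocat:ghp_example'); // => 'basic ' + base64 of 'octocat:ghp_example'
parseAuth('ghp_example');         // => 'ghp_example'
```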

View File

@@ -1 +1 @@
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,oDAAsC;AACtC,wDAA0C;AAC1C,0EAAgD;AAEhD,iCAAyD;AAE5C,QAAA,YAAY,GAAG,UAAS,UAAkB,EAAE,YAAoB,EAAE,aAAa,GAAG,KAAK;IAClG,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IACD,OAAO,IAAI,MAAM,CAAC,MAAM,CACtB;QACE,IAAI,EAAE,SAAS,CAAC,UAAU,CAAC;QAC3B,OAAO,EAAE,YAAY;QACrB,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CAAC;AACP,CAAC,CAAC;AAEF,kDAAkD;AAClD,oCAAoC;AACpC,SAAS,SAAS,CAAC,IAAY;IAC7B,yCAAyC;IACzC,MAAM,CAAC,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5B,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE;QACZ,OAAO,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;KACxD;IAED,mCAAmC;IACnC,OAAO,IAAI,CAAC;AACd,CAAC;AAED,uFAAuF;AACvF,oFAAoF;AACpF,+CAA+C;AAC/C,SAAgB,mBAAmB,CAAC,aAAa,GAAG,KAAK;IACvD,OAAO,oBAAY,CACjB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,0BAAmB,CAAC,gBAAgB,CAAC,EACrC,aAAa,CAAC,CAAC;AACnB,CAAC;AALD,kDAKC"}
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,oDAAsC;AACtC,wDAA0C;AAC1C,0EAAgD;AAEnC,QAAA,YAAY,GAAG;IAC1B,OAAO,IAAI,MAAM,CAAC,MAAM,CACtB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB;QACE,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CAAC;AACP,CAAC,CAAC"}

32
lib/autobuild.js generated
View File

@@ -9,35 +9,20 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const codeql_1 = require("./codeql");
const config_utils = __importStar(require("./config-utils"));
const languages_1 = require("./languages");
const sharedEnv = __importStar(require("./shared-environment"));
const util = __importStar(require("./util"));
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
var _a, _b;
const status = failingLanguage !== undefined || cause !== undefined ? 'failure' : 'success';
const statusReportBase = await util.createStatusReportBase('autobuild', status, startedAt, (_a = cause) === null || _a === void 0 ? void 0 : _a.message, (_b = cause) === null || _b === void 0 ? void 0 : _b.stack);
const statusReport = {
...statusReportBase,
autobuild_languages: allLanguages.join(','),
autobuild_failure: failingLanguage,
};
await util.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
let language;
var _a;
try {
util.prepareLocalRunEnvironment();
if (!await util.sendStatusReport(await util.createStatusReportBase('autobuild', 'starting', startedAt), true)) {
if (util.should_abort('autobuild', true) || !await util.reportActionStarting('autobuild')) {
return;
}
const config = await config_utils.getConfig(util.getRequiredEnvParam('RUNNER_TEMP'));
// Attempt to find a language to autobuild
// We want pick the dominant language in the repo from the ones we're able to build
// The languages are sorted in order specified by user or by lines of code if we got
// them from the GitHub API, so try to build the first language on the list.
const autobuildLanguages = config.languages.filter(languages_1.isTracedLanguage);
language = autobuildLanguages[0];
const autobuildLanguages = ((_a = process.env[sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES]) === null || _a === void 0 ? void 0 : _a.split(',')) || [];
const language = autobuildLanguages[0];
if (!language) {
core.info("None of the languages in this project require extra build steps");
return;
@@ -47,17 +32,16 @@ async function run() {
core.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages.slice(1).join(' and ')}, you must replace this block with custom build steps.`);
}
core.startGroup(`Attempting to automatically build ${language} code`);
const codeQL = codeql_1.getCodeQL(config.codeQLCmd);
const codeQL = codeql_1.getCodeQL();
await codeQL.runAutobuild(language);
core.endGroup();
}
catch (error) {
core.setFailed("We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. " + error.message);
console.log(error);
await sendCompletedStatusReport(startedAt, [language], language, error);
await util.reportActionFailed('autobuild', error.message, error.stack);
return;
}
await sendCompletedStatusReport(startedAt, [language]);
await util.reportActionSucceeded('autobuild');
}
run().catch(e => {
core.setFailed("autobuild action failed. " + e);

View File

@@ -1 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,qCAAqC;AACrC,6DAA+C;AAC/C,2CAA+C;AAC/C,6CAA+B;AAS/B,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GAAG,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;IAC5F,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CAAC,CAAC;IAChB,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,CAAC;IACb,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YAC7G,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CAAC,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC,CAAC;QAErF,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,4BAAgB,CAAC,CAAC;QACrE,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEjC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;QAC3C,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;QAEpC,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;QACxE,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,qCAAqC;AACrC,gEAAkD;AAClD,6CAA+B;AAE/B,KAAK,UAAU,GAAG;;IAChB,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,EAAE;YACzF,OAAO;SACR;QAED,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,OAAA,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,8BAA8B,CAAC,0CAAE,KAAK,CAAC,GAAG,MAAK,EAAE,CAAC;QACnG,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;QAC3B,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;QAEpC,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,MAAM,IAAI,CAAC,kBAAkB,CAAC,WAAW,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QACvE,OAAO;KACR;IAED,MAAM,IAAI,CAAC,qBAAqB,CAAC,WAAW,CAAC,CAAC;AAChD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

57
lib/cli.js generated
View File

@@ -1,57 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const commander_1 = require("commander");
const path = __importStar(require("path"));
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const program = new commander_1.Command();
program.version('0.0.1');
function parseGithubApiUrl(inputUrl) {
try {
const url = new URL(inputUrl);
// If we detect this is trying to be to github.com
// then return with a fixed canonical URL.
if (url.hostname === 'github.com' || url.hostname === 'api.github.com') {
return 'https://api.github.com';
}
// Add the API path if it's not already present.
if (url.pathname.indexOf('/api/v3') === -1) {
url.pathname = path.join(url.pathname, 'api', 'v3');
}
return url.toString();
}
catch (e) {
throw new Error(`"${inputUrl}" is not a valid URL`);
}
}
const logger = logging_1.getCLILogger();
program
.command('upload')
.description('Uploads a SARIF file, or all SARIF files from a directory, to code scanning')
.requiredOption('--sarif-file <file>', 'SARIF file to upload; can also be a directory for uploading multiple')
.requiredOption('--repository <repository>', 'Repository name')
.requiredOption('--commit <commit>', 'SHA of commit that was analyzed')
.requiredOption('--ref <ref>', 'Name of ref that was analyzed')
.requiredOption('--github-url <url>', 'URL of GitHub instance')
.requiredOption('--github-auth <auth>', 'GitHub Apps token, or of the form "username:token" if using a personal access token')
.option('--checkout-path <path>', 'Checkout path (default: current working directory)')
.action(async (cmd) => {
try {
await upload_lib.upload(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, cmd.ref, undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, cmd.githubAuth, parseGithubApiUrl(cmd.githubUrl), 'cli', logger);
}
catch (e) {
logger.error('Upload failed');
logger.error(e);
process.exitCode = 1;
}
});
program.parse(process.argv);
//# sourceMappingURL=cli.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"cli.js","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":";;;;;;;;;AAAA,yCAAoC;AACpC,2CAA6B;AAE7B,uCAAyC;AACzC,6CAAkD;AAClD,yDAA2C;AAE3C,MAAM,OAAO,GAAG,IAAI,mBAAO,EAAE,CAAC;AAC9B,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AAYzB,SAAS,iBAAiB,CAAC,QAAgB;IACzC,IAAI;QACF,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAC;QAE9B,kDAAkD;QAClD,0CAA0C;QAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;YACtE,OAAO,wBAAwB,CAAC;SACjC;QAED,gDAAgD;QAChD,IAAI,GAAG,CAAC,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE;YAC1C,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;SACrD;QAED,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;KAEvB;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,IAAI,KAAK,CAAC,IAAI,QAAQ,sBAAsB,CAAC,CAAC;KACrD;AACH,CAAC;AAED,MAAM,MAAM,GAAG,sBAAY,EAAE,CAAC;AAE9B,OAAO;KACJ,OAAO,CAAC,QAAQ,CAAC;KACjB,WAAW,CAAC,6EAA6E,CAAC;KAC1F,cAAc,CAAC,qBAAqB,EAAE,sEAAsE,CAAC;KAC7G,cAAc,CAAC,2BAA2B,EAAE,iBAAiB,CAAC;KAC9D,cAAc,CAAC,mBAAmB,EAAE,iCAAiC,CAAC;KACtE,cAAc,CAAC,aAAa,EAAE,+BAA+B,CAAC;KAC9D,cAAc,CAAC,oBAAoB,EAAE,wBAAwB,CAAC;KAC9D,cAAc,CAAC,sBAAsB,EAAE,qFAAqF,CAAC;KAC7H,MAAM,CAAC,wBAAwB,EAAE,oDAAoD,CAAC;KACtF,MAAM,CAAC,KAAK,EAAE,GAAe,EAAE,EAAE;IAChC,IAAI;QACF,MAAM,UAAU,CAAC,MAAM,CACrB,GAAG,CAAC,SAAS,EACb,+BAAkB,CAAC,GAAG,CAAC,UAAU,CAAC,EAClC,GAAG,CAAC,MAAM,EACV,GAAG,CAAC,GAAG,EACP,SAAS,EACT,SAAS,EACT,SAAS,EACT,GAAG,CAAC,YAAY,IAAI,OAAO,CAAC,GAAG,EAAE,EACjC,SAAS,EACT,GAAG,CAAC,UAAU,EACd,iBAAiB,CAAC,GAAG,CAAC,SAAS,CAAC,EAChC,KAAK,EACL,MAAM,CAAC,CAAC;KACX;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC;QAC9B,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QAChB,OAAO,CAAC,QAAQ,GAAG,CAAC,CAAC;KACtB;AACH,CAAC,CAAC,CAAC;AAEL,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC"}

245
lib/codeql.js generated
View File

@@ -6,127 +6,29 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const http = __importStar(require("@actions/http-client"));
const toolcache = __importStar(require("@actions/tool-cache"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
const stream = __importStar(require("stream"));
const globalutil = __importStar(require("util"));
const v4_1 = __importDefault(require("uuid/v4"));
const api = __importStar(require("./api-client"));
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
const util = __importStar(require("./util"));
/**
* Stores the CodeQL object, and is populated by `setupCodeQL` or `getCodeQL`.
* Can be overridden in tests using `setCodeQL`.
* Environment variable used to store the location of the CodeQL CLI executable.
* Value is set by setupCodeQL and read by getCodeQL.
*/
let cachedCodeQL = undefined;
const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
const CODEQL_BUNDLE_NAME = "codeql-bundle.tar.gz";
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
function getCodeQLActionRepository() {
// Actions do not know their own repository name,
// so we currently use this hack to find the name based on where our files are.
// This can be removed once the change to the runner in https://github.com/actions/runner/pull/585 is deployed.
const runnerTemp = util.getRequiredEnvParam("RUNNER_TEMP");
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
const relativeScriptPath = path.relative(actionsDirectory, __filename);
// This handles the case where the Action does not come from an Action repository,
// e.g. our integration tests which use the Action code from the current checkout.
if (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath)) {
return CODEQL_DEFAULT_ACTION_REPOSITORY;
}
const relativeScriptPathParts = relativeScriptPath.split(path.sep);
return relativeScriptPathParts[0] + "/" + relativeScriptPathParts[1];
}
async function getCodeQLBundleDownloadURL() {
const codeQLActionRepository = getCodeQLActionRepository();
const potentialDownloadSources = [
// This GitHub instance, and this Action.
[util.getInstanceAPIURL(), codeQLActionRepository],
// This GitHub instance, and the canonical Action.
[util.getInstanceAPIURL(), CODEQL_DEFAULT_ACTION_REPOSITORY],
// GitHub.com, and the canonical Action.
[util.GITHUB_DOTCOM_API_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
];
// We now filter out any duplicates.
// Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
const uniqueDownloadSources = potentialDownloadSources.filter((url, index, self) => index === self.indexOf(url));
for (let downloadSource of uniqueDownloadSources) {
let [apiURL, repository] = downloadSource;
// If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
if (apiURL === util.GITHUB_DOTCOM_API_URL && repository === CODEQL_DEFAULT_ACTION_REPOSITORY) {
break;
}
let [repositoryOwner, repositoryName] = repository.split("/");
try {
const release = await api.getActionsApiClient().repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: CODEQL_BUNDLE_VERSION
});
for (let asset of release.data.assets) {
if (asset.name === CODEQL_BUNDLE_NAME) {
core.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
return asset.url;
}
}
}
catch (e) {
core.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
}
}
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${CODEQL_BUNDLE_NAME}`;
}
// We have to download CodeQL manually because the toolcache doesn't support Accept headers.
// This can be removed once https://github.com/actions/toolkit/pull/530 is merged and released.
async function toolcacheDownloadTool(url, headers) {
const client = new http.HttpClient('CodeQL Action');
const dest = path.join(util.getRequiredEnvParam('RUNNER_TEMP'), v4_1.default());
const response = await client.get(url, headers);
if (response.message.statusCode !== 200) {
const err = new toolcache.HTTPError(response.message.statusCode);
core.info(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
throw err;
}
const pipeline = globalutil.promisify(stream.pipeline);
fs.mkdirSync(path.dirname(dest), { recursive: true });
await pipeline(response.message, fs.createWriteStream(dest));
return dest;
}
const CODEQL_ACTION_CMD = "CODEQL_ACTION_CMD";
async function setupCodeQL() {
try {
let codeqlURL = core.getInput('tools');
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
const codeqlURL = core.getInput('tools', { required: true });
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL);
let codeqlFolder = toolcache.find('CodeQL', codeqlURLVersion);
if (codeqlFolder) {
core.debug(`CodeQL found in cache ${codeqlFolder}`);
}
else {
if (!codeqlURL) {
codeqlURL = await getCodeQLBundleDownloadURL();
}
const headers = { accept: 'application/octet-stream' };
// We only want to provide an authorization header if we are downloading
// from the same GitHub instance the Action is running on.
// This avoids leaking Enterprise tokens to dotcom.
if (codeqlURL.startsWith(util.getInstanceAPIURL() + "/")) {
core.debug('Downloading CodeQL bundle with token.');
let token = core.getInput('token', { required: true });
headers.authorization = `token ${token}`;
}
else {
core.debug('Downloading CodeQL bundle without token.');
}
let codeqlPath = await toolcacheDownloadTool(codeqlURL, headers);
core.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
const codeqlPath = await toolcache.downloadTool(codeqlURL);
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', codeqlURLVersion);
}
@@ -135,10 +37,10 @@ async function setupCodeQL() {
codeqlCmd += ".exe";
}
else if (process.platform !== 'linux' && process.platform !== 'darwin') {
throw new Error("Unsupported platform: " + process.platform);
throw new Error("Unsupported plaform: " + process.platform);
}
cachedCodeQL = getCodeQLForCmd(codeqlCmd);
return cachedCodeQL;
core.exportVariable(CODEQL_ACTION_CMD, codeqlCmd);
return getCodeQLForCmd(codeqlCmd);
}
catch (e) {
core.error(e);
@@ -163,67 +65,15 @@ function getCodeQLURLVersion(url) {
return s;
}
exports.getCodeQLURLVersion = getCodeQLURLVersion;
/**
* Use the CodeQL executable located at the given path.
*/
function getCodeQL(cmd) {
if (cachedCodeQL === undefined) {
cachedCodeQL = getCodeQLForCmd(cmd);
}
return cachedCodeQL;
function getCodeQL() {
const codeqlCmd = util.getRequiredEnvParam(CODEQL_ACTION_CMD);
return getCodeQLForCmd(codeqlCmd);
}
exports.getCodeQL = getCodeQL;
function resolveFunction(partialCodeql, methodName, defaultImplementation) {
if (typeof partialCodeql[methodName] !== 'function') {
if (defaultImplementation !== undefined) {
return defaultImplementation;
}
const dummyMethod = () => {
throw new Error('CodeQL ' + methodName + ' method not correctly defined');
};
return dummyMethod;
}
return partialCodeql[methodName];
}
/**
* Set the functionality for CodeQL methods. Only for use in tests.
*
* Accepts a partial object and any undefined methods will be implemented
* to immediately throw an exception indicating which method is missing.
*/
function setCodeQL(partialCodeql) {
cachedCodeQL = {
getPath: resolveFunction(partialCodeql, 'getPath', () => '/tmp/dummy-path'),
printVersion: resolveFunction(partialCodeql, 'printVersion'),
getTracerEnv: resolveFunction(partialCodeql, 'getTracerEnv'),
databaseInit: resolveFunction(partialCodeql, 'databaseInit'),
runAutobuild: resolveFunction(partialCodeql, 'runAutobuild'),
extractScannedLanguage: resolveFunction(partialCodeql, 'extractScannedLanguage'),
finalizeDatabase: resolveFunction(partialCodeql, 'finalizeDatabase'),
resolveQueries: resolveFunction(partialCodeql, 'resolveQueries'),
databaseAnalyze: resolveFunction(partialCodeql, 'databaseAnalyze')
};
return cachedCodeQL;
}
exports.setCodeQL = setCodeQL;
/**
* Get the cached CodeQL object. Should only be used from tests.
*
* TODO: Work out a good way for tests to get this from the test context
* instead of having to have this method.
*/
function getCachedCodeQL() {
if (cachedCodeQL === undefined) {
// Should never happen as setCodeQL is called by testing-utils.setupTests
throw new Error('cachedCodeQL undefined');
}
return cachedCodeQL;
}
exports.getCachedCodeQL = getCachedCodeQL;
function getCodeQLForCmd(cmd) {
return {
getPath: function () {
return cmd;
getDir: function () {
return path.dirname(cmd);
},
printVersion: async function () {
await exec.exec(cmd, [
@@ -239,7 +89,6 @@ function getCodeQLForCmd(cmd) {
'trace-command',
databasePath,
...compilerSpecArg,
...getExtraOptionsFromEnv(['database', 'trace-command']),
process.execPath,
path.resolve(__dirname, 'tracer-env.js'),
envFile
@@ -253,7 +102,6 @@ function getCodeQLForCmd(cmd) {
databasePath,
'--language=' + language,
'--source-root=' + sourceRoot,
...getExtraOptionsFromEnv(['database', 'init']),
]);
},
runAutobuild: async function (language) {
@@ -275,8 +123,7 @@ function getCodeQLForCmd(cmd) {
'resolve',
'extractor',
'--format=json',
'--language=' + language,
...getExtraOptionsFromEnv(['resolve', 'extractor']),
'--language=' + language
], {
silent: true,
listeners: {
@@ -291,7 +138,6 @@ function getCodeQLForCmd(cmd) {
await exec.exec(cmd, [
'database',
'trace-command',
...getExtraOptionsFromEnv(['database', 'trace-command']),
databasePath,
'--',
traceCommand
@@ -301,23 +147,17 @@ function getCodeQLForCmd(cmd) {
await exec.exec(cmd, [
'database',
'finalize',
...getExtraOptionsFromEnv(['database', 'finalize']),
databasePath
]);
},
resolveQueries: async function (queries, extraSearchPath) {
const codeqlArgs = [
resolveQueries: async function (queries) {
let output = '';
await exec.exec(cmd, [
'resolve',
'queries',
...queries,
'--format=bylanguage',
...getExtraOptionsFromEnv(['resolve', 'queries'])
];
if (extraSearchPath !== undefined) {
codeqlArgs.push('--search-path', extraSearchPath);
}
let output = '';
await exec.exec(cmd, codeqlArgs, {
'--format=bylanguage'
], {
listeners: {
stdout: (data) => {
output += data.toString();
@@ -336,54 +176,9 @@ function getCodeQLForCmd(cmd) {
'--format=sarif-latest',
'--output=' + sarifFile,
'--no-sarif-add-snippets',
...getExtraOptionsFromEnv(['database', 'analyze']),
querySuite
]);
}
};
}
/**
* Gets the options for `path` of `options` as an array of extra option strings.
*/
function getExtraOptionsFromEnv(path) {
let options = util.getExtraOptionsEnvParam();
return getExtraOptions(options, path, []);
}
/**
* Gets the options for `path` of `options` as an array of extra option strings.
*
* - the special terminal step name '*' in `options` matches all path steps
* - throws an exception if this conversion is impossible.
*/
function getExtraOptions(options, path, pathInfo) {
var _a, _b, _c;
/**
* Gets `options` as an array of extra option strings.
*
* - throws an exception mentioning `pathInfo` if this conversion is impossible.
*/
function asExtraOptions(options, pathInfo) {
if (options === undefined) {
return [];
}
if (!Array.isArray(options)) {
const msg = `The extra options for '${pathInfo.join('.')}' ('${JSON.stringify(options)}') are not in an array.`;
throw new Error(msg);
}
return options.map(o => {
const t = typeof o;
if (t !== 'string' && t !== 'number' && t !== 'boolean') {
const msg = `The extra option for '${pathInfo.join('.')}' ('${JSON.stringify(o)}') is not a primitive value.`;
throw new Error(msg);
}
return o + '';
});
}
let all = asExtraOptions((_a = options) === null || _a === void 0 ? void 0 : _a['*'], pathInfo.concat('*'));
let specific = path.length === 0 ?
asExtraOptions(options, pathInfo) :
getExtraOptions((_b = options) === null || _b === void 0 ? void 0 : _b[path[0]], (_c = path) === null || _c === void 0 ? void 0 : _c.slice(1), pathInfo.concat(path[0]));
return all.concat(specific);
}
exports.getExtraOptions = getExtraOptions;
//# sourceMappingURL=codeql.js.map
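The `getExtraOptions` helper shown in the hunk above resolves extra CLI options for a nested command path, with `'*'` entries applying at every level and more specific entries appended afterwards. A brief illustration; the option values are invented for the example:

```typescript
// Invented example values; only the resolution rules follow the code above.
const extraOptions = {
  '*': ['--verbosity=warnings'],           // applies to every codeql command
  database: { analyze: ['--ram=4096'] },   // applies only to `codeql database analyze`
};

// getExtraOptions(extraOptions, ['database', 'analyze'], [])
//   => ['--verbosity=warnings', '--ram=4096']
// Wildcards at deeper levels are collected too, and non-array or non-primitive
// values throw, as exercised by the codeql.test.js hunk further down.
```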

File diff suppressed because one or more lines are too long

22
lib/codeql.test.js generated
View File

@@ -57,26 +57,4 @@ ava_1.default('parse codeql bundle url version', t => {
}
}
});
ava_1.default('getExtraOptions works for explicit paths', t => {
t.deepEqual(codeql.getExtraOptions({}, ['foo'], []), []);
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ['foo'], []), ['42']);
t.deepEqual(codeql.getExtraOptions({ foo: { bar: [42] } }, ['foo', 'bar'], []), ['42']);
});
ava_1.default('getExtraOptions works for wildcards', t => {
t.deepEqual(codeql.getExtraOptions({ '*': [42] }, ['foo'], []), ['42']);
});
ava_1.default('getExtraOptions works for wildcards and explicit paths', t => {
let o1 = { '*': [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o1, ['foo'], []), ['42', '87']);
let o2 = { '*': [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o2, ['foo', 'bar'], []), ['42']);
let o3 = { '*': [42], foo: { '*': [87], bar: [99] } };
let p = ['foo', 'bar'];
t.deepEqual(codeql.getExtraOptions(o3, p, []), ['42', '87', '99']);
});
ava_1.default('getExtraOptions throws for bad content', t => {
t.throws(() => codeql.getExtraOptions({ '*': 42 }, ['foo'], []));
t.throws(() => codeql.getExtraOptions({ foo: 87 }, ['foo'], []));
t.throws(() => codeql.getExtraOptions({ '*': [42], foo: { '*': 87, bar: [99] } }, ['foo', 'bar'], []));
});
//# sourceMappingURL=codeql.test.js.map

View File

@@ -1 +1 @@
{"version":3,"file":"codeql.test.js","sourceRoot":"","sources":["../src/codeql.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,iDAAmC;AACnC,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAEnC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QAEzC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAE9D,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAGrF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,8CAA8C,OAAO,uBAAuB,CAAC;YAE1G,MAAM,MAAM,CAAC,WAAW,EAAE,CAAC;YAE3B,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,MAAM,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC;YACtD,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,0CAA0C,EAAE,CAAC,CAAC,EAAE;IACnD,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;IAEzD,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;IAEtE,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAC,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;AACtF,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,qCAAqC,EAAE,CAAC,CAAC,EAAE;IAC9C,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;AACxE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,wDAAwD,EAAE,CAAC,CAAC,EAAE;IACjE,IAAI,EAAE,GAAG,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,CAAC;IAChC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;IAEnE,IAAI,EAAE,GAAG,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,CAAC;IAChC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC;IAEpE,IAAI,EAAE,GAAG,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAC,CAAC;IACnD,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;IACvB,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,eAAe,CAAC,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC;AACrE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,wCAAwC,EAAE,CAAC,CAAC,EAAE;IACjD,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,EAAE,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IAE/D,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,C
AAC,EAAC,GAAG,EAAE,EAAE,EAAC,EAAE,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IAE/D,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,eAAe,CAAC,EAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,EAAE,EAAE,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,EAAC,EAAC,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;AACtG,CAAC,CAAC,CAAC"}
{"version":3,"file":"codeql.test.js","sourceRoot":"","sources":["../src/codeql.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,+DAAiD;AACjD,8CAAuB;AACvB,gDAAwB;AACxB,2CAA6B;AAE7B,iDAAmC;AACnC,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,8BAA8B,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAE7C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QAEnC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QAEzC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAE9D,MAAM,QAAQ,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAE1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACxC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAE5B,cAAI,CAAC,qBAAqB,CAAC;iBACxB,GAAG,CAAC,2BAA2B,OAAO,uBAAuB,CAAC;iBAC9D,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,uCAAuC,CAAC,CAAC,CAAC;YAGrF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,8CAA8C,OAAO,uBAAuB,CAAC;YAE1G,MAAM,MAAM,CAAC,WAAW,EAAE,CAAC;YAE3B,CAAC,CAAC,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,SAAS,OAAO,EAAE,CAAC,CAAC,CAAC;SACxD;QAED,MAAM,cAAc,GAAG,SAAS,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;QAE3D,CAAC,CAAC,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAE1C,MAAM,KAAK,GAAG;QACZ,UAAU,EAAE,gBAAgB;QAC5B,YAAY,EAAE,kBAAkB;QAChC,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,OAAO;QAChB,aAAa,EAAE,aAAa;QAC5B,cAAc,EAAE,cAAc;KAC/B,CAAC;IAEF,KAAK,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAC9D,MAAM,GAAG,GAAG,wCAAwC,OAAO,MAAM,CAAC;QAElE,IAAI;YACF,MAAM,aAAa,GAAG,MAAM,CAAC,mBAAmB,CAAC,GAAG,CAAC,CAAC;YACtD,CAAC,CAAC,SAAS,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC;SAC7C;QAAC,OAAO,CAAC,EAAE;YACV,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;SACnB;KACF;AACH,CAAC,CAAC,CAAC"}

466
lib/config-utils.js generated
View File

@@ -8,171 +8,103 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const yaml = __importStar(require("js-yaml"));
const path = __importStar(require("path"));
const api = __importStar(require("./api-client"));
const externalQueries = __importStar(require("./external-queries"));
const languages_1 = require("./languages");
const util = __importStar(require("./util"));
// Property names from the user-supplied config file.
const NAME_PROPERTY = 'name';
const DISABLE_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
const DISPLAY_DEFAULT_QUERIES_PROPERTY = 'disable-default-queries';
const QUERIES_PROPERTY = 'queries';
const QUERIES_USES_PROPERTY = 'uses';
const PATHS_IGNORE_PROPERTY = 'paths-ignore';
const PATHS_PROPERTY = 'paths';
/**
* A list of queries from https://github.com/github/codeql that
* we don't want to run. Disabling them here is a quicker alternative to
* disabling them in the code scanning query suites. Queries should also
* be disabled in the suites, and removed from this list here once the
* bundle is updated to make those suite changes live.
*
* Format is a map from language to an array of path suffixes of .ql files.
*/
const DISABLED_BUILTIN_QUERIES = {
'csharp': [
'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
]
};
function queryIsDisabled(language, query) {
return (DISABLED_BUILTIN_QUERIES[language] || [])
.some(disabledQuery => query.endsWith(disabledQuery));
}
/**
* Asserts that the noDeclaredLanguage and multipleDeclaredLanguages fields are
* both empty and errors if they are not.
*/
function validateQueries(resolvedQueries) {
const noDeclaredLanguage = resolvedQueries.noDeclaredLanguage;
const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
if (noDeclaredLanguageQueries.length !== 0) {
throw new Error('The following queries do not declare a language. ' +
'Their qlpack.yml files are either missing or is invalid.\n' +
noDeclaredLanguageQueries.join('\n'));
}
const multipleDeclaredLanguages = resolvedQueries.multipleDeclaredLanguages;
const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
if (multipleDeclaredLanguagesQueries.length !== 0) {
throw new Error('The following queries declare multiple languages. ' +
'Their qlpack.yml files are either missing or is invalid.\n' +
multipleDeclaredLanguagesQueries.join('\n'));
class ExternalQuery {
constructor(repository, ref) {
this.path = '';
this.repository = repository;
this.ref = ref;
}
}
/**
* Run 'codeql resolve queries' and add the results to resultMap
*/
async function runResolveQueries(codeQL, resultMap, toResolve, extraSearchPath, errorOnInvalidQueries) {
const resolvedQueries = await codeQL.resolveQueries(toResolve, extraSearchPath);
for (const [language, queries] of Object.entries(resolvedQueries.byLanguage)) {
if (resultMap[language] === undefined) {
resultMap[language] = [];
}
resultMap[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
}
if (errorOnInvalidQueries) {
validateQueries(resolvedQueries);
}
}
/**
* Get the set of queries included by default.
*/
async function addDefaultQueries(codeQL, languages, resultMap) {
const suites = languages.map(l => l + '-code-scanning.qls');
await runResolveQueries(codeQL, resultMap, suites, undefined, false);
}
exports.ExternalQuery = ExternalQuery;
// The set of acceptable values for built-in suites from the codeql bundle
const builtinSuites = ['security-extended', 'security-and-quality'];
/**
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
* Throws an error if suiteName is not a valid builtin suite.
*/
async function addBuiltinSuiteQueries(configFile, languages, codeQL, resultMap, suiteName) {
const suite = builtinSuites.find((suite) => suite === suiteName);
if (!suite) {
throw new Error(getQueryUsesInvalid(configFile, suiteName));
class Config {
constructor() {
this.name = "";
this.disableDefaultQueries = false;
this.additionalQueries = [];
this.externalQueries = [];
this.additionalSuites = [];
this.pathsIgnore = [];
this.paths = [];
}
addQuery(configFile, queryUses) {
// The logic for parsing the string is based on what actions does for
// parsing the 'uses' actions in the workflow file
queryUses = queryUses.trim();
if (queryUses === "") {
throw new Error(getQueryUsesInvalid(configFile));
}
// Check for the local path case before we start trying to parse the repository name
if (queryUses.startsWith("./")) {
const localQueryPath = queryUses.slice(2);
// Resolve the local path against the workspace so that when this is
// passed to codeql it resolves to exactly the path we expect it to resolve to.
const workspacePath = fs.realpathSync(util.getRequiredEnvParam('GITHUB_WORKSPACE'));
let absoluteQueryPath = path.join(workspacePath, localQueryPath);
// Check the file exists
if (!fs.existsSync(absoluteQueryPath)) {
throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
}
// Call this after checking file exists, because it'll fail if file doesn't exist
absoluteQueryPath = fs.realpathSync(absoluteQueryPath);
// Check the local path doesn't jump outside the repo using '..' or symlinks
if (!(absoluteQueryPath + path.sep).startsWith(workspacePath + path.sep)) {
throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
}
this.additionalQueries.push(absoluteQueryPath);
return;
}
// Check for one of the builtin suites
if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
const suite = builtinSuites.find((suite) => suite === queryUses);
if (suite) {
this.additionalSuites.push(suite);
return;
}
else {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
}
let tok = queryUses.split('@');
if (tok.length !== 2) {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
const ref = tok[1];
tok = tok[0].split('/');
// The first token is the owner
// The second token is the repo
// The rest is a path, if there is more than one token combine them to form the full path
if (tok.length < 2) {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
if (tok.length > 3) {
tok = [tok[0], tok[1], tok.slice(2).join('/')];
}
// Check none of the parts of the repository name are empty
if (tok[0].trim() === '' || tok[1].trim() === '') {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
let external = new ExternalQuery(tok[0] + '/' + tok[1], ref);
if (tok.length === 3) {
external.path = tok[2];
}
this.externalQueries.push(external);
}
const suites = languages.map(l => l + '-' + suiteName + '.qls');
await runResolveQueries(codeQL, resultMap, suites, undefined, false);
}
/**
* Retrieve the set of queries at localQueryPath and add them to resultMap.
*/
async function addLocalQueries(configFile, codeQL, resultMap, localQueryPath) {
// Resolve the local path against the workspace so that when this is
// passed to codeql it resolves to exactly the path we expect it to resolve to.
const workspacePath = fs.realpathSync(util.getRequiredEnvParam('GITHUB_WORKSPACE'));
let absoluteQueryPath = path.join(workspacePath, localQueryPath);
// Check the file exists
if (!fs.existsSync(absoluteQueryPath)) {
throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
}
// Call this after checking file exists, because it'll fail if file doesn't exist
absoluteQueryPath = fs.realpathSync(absoluteQueryPath);
// Check the local path doesn't jump outside the repo using '..' or symlinks
if (!(absoluteQueryPath + path.sep).startsWith(workspacePath + path.sep)) {
throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
}
// Get the root of the current repo to use when resolving query dependencies
const rootOfRepo = util.getRequiredEnvParam('GITHUB_WORKSPACE');
await runResolveQueries(codeQL, resultMap, [absoluteQueryPath], rootOfRepo, true);
}
/**
* Retrieve the set of queries at the referenced remote repo and add them to resultMap.
*/
async function addRemoteQueries(configFile, codeQL, resultMap, queryUses, tempDir) {
let tok = queryUses.split('@');
if (tok.length !== 2) {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
const ref = tok[1];
tok = tok[0].split('/');
// The first token is the owner
// The second token is the repo
// The rest is a path; if there is more than one token, combine them to form the full path
if (tok.length < 2) {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
// Check none of the parts of the repository name are empty
if (tok[0].trim() === '' || tok[1].trim() === '') {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
}
const nwo = tok[0] + '/' + tok[1];
// Checkout the external repository
const rootOfRepo = await externalQueries.checkoutExternalRepository(nwo, ref, tempDir);
const queryPath = tok.length > 2
? path.join(rootOfRepo, tok.slice(2).join('/'))
: rootOfRepo;
await runResolveQueries(codeQL, resultMap, [queryPath], rootOfRepo, true);
}
/**
* Parse a query 'uses' field to a discrete set of query files and update resultMap.
*
 * The logic for parsing the string is based on what GitHub Actions does when
 * parsing the 'uses' field in a workflow file. So it can handle
* local paths starting with './', or references to remote repos, or
* a finite set of hardcoded terms for builtin suites.
*/
async function parseQueryUses(configFile, languages, codeQL, resultMap, queryUses, tempDir) {
queryUses = queryUses.trim();
if (queryUses === "") {
throw new Error(getQueryUsesInvalid(configFile));
}
// Check for the local path case before we start trying to parse the repository name
if (queryUses.startsWith("./")) {
await addLocalQueries(configFile, codeQL, resultMap, queryUses.slice(2));
return;
}
// Check for one of the builtin suites
if (queryUses.indexOf('/') === -1 && queryUses.indexOf('@') === -1) {
await addBuiltinSuiteQueries(configFile, languages, codeQL, resultMap, queryUses);
return;
}
// Otherwise, must be a reference to another repo
await addRemoteQueries(configFile, codeQL, resultMap, queryUses, tempDir);
}
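// Editor's note (illustrative sketch, not part of this diff): the three 'uses'
// forms handled above would be dispatched roughly as follows. The paths and the
// suite name are hypothetical examples, assuming the suite name appears in builtinSuites.
//
//   await parseQueryUses(configFile, languages, codeQL, resultMap, './my-queries', tempDir);              // local path -> addLocalQueries
//   await parseQueryUses(configFile, languages, codeQL, resultMap, 'security-extended', tempDir);         // builtin suite -> addBuiltinSuiteQueries
//   await parseQueryUses(configFile, languages, codeQL, resultMap, 'octo-org/queries/cpp@main', tempDir); // remote repo -> addRemoteQueries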
exports.Config = Config;
// Regex validating stars in paths or paths-ignore entries.
// The intention is to only allow ** to appear when immediately
// preceded and followed by a slash.
@@ -224,7 +156,7 @@ function getNameInvalid(configFile) {
}
exports.getNameInvalid = getNameInvalid;
function getDisableDefaultQueriesInvalid(configFile) {
return getConfigFilePropertyError(configFile, DISABLE_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
return getConfigFilePropertyError(configFile, DISPLAY_DEFAULT_QUERIES_PROPERTY, 'must be a boolean');
}
exports.getDisableDefaultQueriesInvalid = getDisableDefaultQueriesInvalid;
function getQueriesInvalid(configFile) {
@@ -278,114 +210,14 @@ exports.getConfigFileDirectoryGivenMessage = getConfigFileDirectoryGivenMessage;
function getConfigFilePropertyError(configFile, property, error) {
return 'The configuration file "' + configFile + '" is invalid: property "' + property + '" ' + error;
}
function getNoLanguagesError() {
return "Did not detect any languages to analyze. " +
"Please update input in workflow or check that GitHub detects the correct languages in your repository.";
}
exports.getNoLanguagesError = getNoLanguagesError;
function getUnknownLanguagesError(languages) {
return "Did not recognise the following languages: " + languages.join(', ');
}
exports.getUnknownLanguagesError = getUnknownLanguagesError;
/**
* Gets the set of languages in the current repository
*/
async function getLanguagesInRepo() {
var _a;
let repo_nwo = (_a = process.env['GITHUB_REPOSITORY']) === null || _a === void 0 ? void 0 : _a.split("/");
if (repo_nwo) {
let owner = repo_nwo[0];
let repo = repo_nwo[1];
core.debug(`GitHub repo ${owner} ${repo}`);
const response = await api.getActionsApiClient(true).repos.listLanguages({
owner,
repo
});
core.debug("Languages API response: " + JSON.stringify(response));
// The GitHub API returns languages in order of popularity. When we pick a
// language to autobuild we want to pick the most popular traced language.
// Since sets in JavaScript maintain insertion order, using a set here and then
// spreading it into an array gives us an array of languages ordered by popularity.
let languages = new Set();
for (let lang of Object.keys(response.data)) {
let parsedLang = languages_1.parseLanguage(lang);
if (parsedLang !== undefined) {
languages.add(parsedLang);
}
}
return [...languages];
async function initConfig() {
let configFile = core.getInput('config-file');
const config = new Config();
// If no config file was provided create an empty one
if (configFile === '') {
core.debug('No configuration file was provided');
return config;
}
else {
return [];
}
}
/**
* Get the languages to analyse.
*
* The result is obtained from the action input parameter 'languages' if that
* has been set, otherwise it is deduced as all languages in the repo that
* can be analysed.
*
* If no languages could be detected from either the workflow or the repository
* then throw an error.
*/
async function getLanguages() {
// Obtain from action input 'languages' if set
let languages = core.getInput('languages', { required: false })
.split(',')
.map(x => x.trim())
.filter(x => x.length > 0);
core.info("Languages from configuration: " + JSON.stringify(languages));
if (languages.length === 0) {
// Obtain languages as all languages in the repo that can be analysed
languages = await getLanguagesInRepo();
core.info("Automatically detected languages: " + JSON.stringify(languages));
}
// If the languages parameter was not given and no languages were
// detected then fail here as this is a workflow configuration error.
if (languages.length === 0) {
throw new Error(getNoLanguagesError());
}
// Make sure they are supported
const parsedLanguages = [];
const unknownLanguages = [];
for (let language of languages) {
const parsedLanguage = languages_1.parseLanguage(language);
if (parsedLanguage === undefined) {
unknownLanguages.push(language);
}
else if (parsedLanguages.indexOf(parsedLanguage) === -1) {
parsedLanguages.push(parsedLanguage);
}
}
if (unknownLanguages.length > 0) {
throw new Error(getUnknownLanguagesError(unknownLanguages));
}
return parsedLanguages;
}
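// Editor's note (illustrative sketch, not part of this diff): given the action
// input "languages: javascript, typescript, python", the loop above maps
// 'typescript' to its 'javascript' alias and deduplicates, so getLanguages()
// returns ['javascript', 'python']; an unrecognised entry such as 'english'
// would instead cause getUnknownLanguagesError(['english']) to be thrown.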
/**
* Get the default config for when the user has not supplied one.
*/
async function getDefaultConfig(tempDir, toolCacheDir, codeQL) {
const languages = await getLanguages();
const queries = {};
await addDefaultQueries(codeQL, languages, queries);
return {
languages: languages,
queries: queries,
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir,
toolCacheDir,
codeQLCmd: codeQL.getPath(),
};
}
exports.getDefaultConfig = getDefaultConfig;
/**
* Load the config from the given file.
*/
async function loadConfig(configFile, tempDir, toolCacheDir, codeQL) {
let parsedYAML;
if (isLocal(configFile)) {
// Treat the config file as relative to the workspace
@@ -396,8 +228,6 @@ async function loadConfig(configFile, tempDir, toolCacheDir, codeQL) {
else {
parsedYAML = await getRemoteConfig(configFile);
}
// Validate that the 'name' property is syntactically correct,
// even though we don't use the value yet.
if (NAME_PROPERTY in parsedYAML) {
if (typeof parsedYAML[NAME_PROPERTY] !== "string") {
throw new Error(getNameInvalid(configFile));
@@ -405,31 +235,24 @@ async function loadConfig(configFile, tempDir, toolCacheDir, codeQL) {
if (parsedYAML[NAME_PROPERTY].length === 0) {
throw new Error(getNameInvalid(configFile));
}
config.name = parsedYAML[NAME_PROPERTY];
}
const languages = await getLanguages();
const queries = {};
const pathsIgnore = [];
const paths = [];
let disableDefaultQueries = false;
if (DISABLE_DEFAULT_QUERIES_PROPERTY in parsedYAML) {
if (typeof parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
if (DISPLAY_DEFAULT_QUERIES_PROPERTY in parsedYAML) {
if (typeof parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
throw new Error(getDisableDefaultQueriesInvalid(configFile));
}
disableDefaultQueries = parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY];
}
if (!disableDefaultQueries) {
await addDefaultQueries(codeQL, languages, queries);
config.disableDefaultQueries = parsedYAML[DISPLAY_DEFAULT_QUERIES_PROPERTY];
}
if (QUERIES_PROPERTY in parsedYAML) {
if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
throw new Error(getQueriesInvalid(configFile));
}
for (const query of parsedYAML[QUERIES_PROPERTY]) {
parsedYAML[QUERIES_PROPERTY].forEach(query => {
if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
throw new Error(getQueryUsesInvalid(configFile));
}
await parseQueryUses(configFile, languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir);
}
config.addQuery(configFile, query[QUERIES_USES_PROPERTY]);
});
}
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
@@ -439,7 +262,7 @@ async function loadConfig(configFile, tempDir, toolCacheDir, codeQL) {
if (typeof path !== "string" || path === '') {
throw new Error(getPathsIgnoreInvalid(configFile));
}
pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile));
config.pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile));
});
}
if (PATHS_PROPERTY in parsedYAML) {
@@ -450,50 +273,11 @@ async function loadConfig(configFile, tempDir, toolCacheDir, codeQL) {
if (typeof path !== "string" || path === '') {
throw new Error(getPathsInvalid(configFile));
}
paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile));
config.paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile));
});
}
// The list of queries should not be empty for any language. If it is then
// it is a user configuration error.
for (const language of languages) {
if (queries[language] === undefined || queries[language].length === 0) {
throw new Error(`Did not detect any queries to run for ${language}. ` +
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
}
}
return {
languages,
queries,
pathsIgnore,
paths,
originalUserInput: parsedYAML,
tempDir,
toolCacheDir,
codeQLCmd: codeQL.getPath(),
};
}
/**
* Load and return the config.
*
* This will parse the config from the user input if present, or generate
* a default config. The parsed config is then stored to a known location.
*/
async function initConfig(tempDir, toolCacheDir, codeQL) {
const configFile = core.getInput('config-file');
let config;
// If no config file was provided create an empty one
if (configFile === '') {
core.debug('No configuration file was provided');
config = await getDefaultConfig(tempDir, toolCacheDir, codeQL);
}
else {
config = await loadConfig(configFile, tempDir, toolCacheDir, codeQL);
}
// Save the config so we can easily access it again in the future
await saveConfig(config);
return config;
}
exports.initConfig = initConfig;
function isLocal(configPath) {
// If the path starts with ./, look locally
if (configPath.indexOf("./") === 0) {
@@ -520,7 +304,7 @@ async function getRemoteConfig(configFile) {
if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
}
const response = await api.getActionsApiClient(true).repos.getContents({
const response = await api.getApiClient().repos.getContents({
owner: pieces.groups.owner,
repo: pieces.groups.repo,
path: pieces.groups.path,
@@ -538,41 +322,35 @@ async function getRemoteConfig(configFile) {
}
return yaml.safeLoad(Buffer.from(fileContents, 'base64').toString('binary'));
}
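// Editor's note (illustrative sketch, not part of this diff): a remote reference
// such as 'octo-org/codeql-config/config.yaml@main' (a hypothetical repository)
// is split by the regex into owner 'octo-org', repo 'codeql-config', path
// 'config.yaml' and ref 'main'; the file is then fetched with repos.getContents
// and its base64-encoded contents are decoded and parsed as YAML by the code above.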
/**
* Get the file path where the parsed config will be stored.
*/
function getPathToParsedConfigFile(tempDir) {
return path.join(tempDir, 'config');
function getConfigFolder() {
return util.getRequiredEnvParam('RUNNER_TEMP');
}
exports.getPathToParsedConfigFile = getPathToParsedConfigFile;
/**
* Store the given config to the path returned from getPathToParsedConfigFile.
*/
function getConfigFile() {
return path.join(getConfigFolder(), 'config');
}
exports.getConfigFile = getConfigFile;
async function saveConfig(config) {
const configString = JSON.stringify(config);
const configFile = getPathToParsedConfigFile(config.tempDir);
fs.mkdirSync(path.dirname(configFile), { recursive: true });
fs.writeFileSync(configFile, configString, 'utf8');
await io.mkdirP(getConfigFolder());
fs.writeFileSync(getConfigFile(), configString, 'utf8');
core.debug('Saved config:');
core.debug(configString);
}
/**
* Get the config.
*
* If this is the first time in a workflow that this is being called then
* this will parse the config from the user input. The parsed config is then
* stored to a known location. On the second and further calls, this will
* return the contents of the parsed config from the known location.
*/
async function getConfig(tempDir) {
const configFile = getPathToParsedConfigFile(tempDir);
if (!fs.existsSync(configFile)) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
async function loadConfig() {
const configFile = getConfigFile();
if (fs.existsSync(configFile)) {
const configString = fs.readFileSync(configFile, 'utf8');
core.debug('Loaded config:');
core.debug(configString);
return JSON.parse(configString);
}
else {
const config = await initConfig();
core.debug('Initialized config:');
core.debug(JSON.stringify(config));
await saveConfig(config);
return config;
}
const configString = fs.readFileSync(configFile, 'utf8');
core.debug('Loaded config:');
core.debug(configString);
return JSON.parse(configString);
}
exports.getConfig = getConfig;
exports.loadConfig = loadConfig;
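// Editor's note (illustrative sketch, not part of this diff): the first call to
// loadConfig() in a workflow run parses the user input and saves the result as
// JSON at $RUNNER_TEMP/config; later calls read that file back instead of
// re-parsing, e.g.
//
//   const config = await loadConfig();  // parses the input and saves it
//   const cached = await loadConfig();  // returns the previously saved config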
//# sourceMappingURL=config-utils.js.map

File diff suppressed because one or more lines are too long

lib/config-utils.test.js generated

@@ -16,9 +16,7 @@ const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const sinon_1 = __importDefault(require("sinon"));
const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const languages_1 = require("./languages");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
@@ -43,62 +41,27 @@ function mockGetContents(content) {
sinon_1.default.stub(api, "getApiClient").value(() => client);
return spyGetContents;
}
function mockListLanguages(languages) {
// Passing an auth token is required, so we just use a dummy value
let client = new github.GitHub('123');
const response = {
data: {},
};
for (const language of languages) {
response.data[language] = 123;
}
sinon_1.default.stub(client.repos, "listLanguages").resolves(response);
sinon_1.default.stub(api, "getApiClient").value(() => client);
}
ava_1.default("load empty config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', undefined);
setInput('languages', 'javascript,python');
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function () {
return {
byLanguage: {},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
},
});
const config = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
t.deepEqual(config, await configUtils.getDefaultConfig(tmpDir, tmpDir, codeQL));
const config = await configUtils.loadConfig();
t.deepEqual(config, new configUtils.Config());
});
});
ava_1.default("loading config saves config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', undefined);
setInput('languages', 'javascript,python');
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function () {
return {
byLanguage: {},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
},
});
const configFile = configUtils.getConfigFile();
// Sanity check the saved config file does not already exist
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// Sanity check that getConfig throws before we have called initConfig
await t.throwsAsync(() => configUtils.getConfig(tmpDir));
const config1 = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
t.false(fs.existsSync(configFile));
const config = await configUtils.loadConfig();
// The saved config file should now exist
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// And that same newly-initialised config should now be returned by getConfig
const config2 = await configUtils.getConfig(tmpDir);
t.deepEqual(config1, config2);
t.true(fs.existsSync(configFile));
// And the contents should parse correctly to the config that was returned
t.deepEqual(fs.readFileSync(configFile, 'utf8'), JSON.stringify(config));
});
});
ava_1.default("load input outside of workspace", async (t) => {
@@ -107,8 +70,8 @@ ava_1.default("load input outside of workspace", async (t) => {
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('config-file', '../input');
try {
await configUtils.initConfig(tmpDir, tmpDir, codeql_1.getCachedCodeQL());
throw new Error('initConfig did not throw error');
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, '../input'))));
@@ -122,8 +85,8 @@ ava_1.default("load non-local input with invalid repo syntax", async (t) => {
// no filename given, just a repo
setInput('config-file', 'octo-org/codeql-config@main');
try {
await configUtils.initConfig(tmpDir, tmpDir, codeql_1.getCachedCodeQL());
throw new Error('initConfig did not throw error');
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileRepoFormatInvalidMessage('octo-org/codeql-config@main')));
@@ -136,10 +99,9 @@ ava_1.default("load non-existent input", async (t) => {
process.env['GITHUB_WORKSPACE'] = tmpDir;
t.false(fs.existsSync(path.join(tmpDir, 'input')));
setInput('config-file', 'input');
setInput('languages', 'javascript');
try {
await configUtils.initConfig(tmpDir, tmpDir, codeql_1.getCachedCodeQL());
throw new Error('initConfig did not throw error');
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, 'input'))));
@@ -150,118 +112,10 @@ ava_1.default("load non-empty input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function () {
return {
byLanguage: {
'javascript': {
'/foo/a.ql': {},
'/bar/b.ql': {},
},
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
},
});
// Just create a generic config object with non-default values for all fields
const inputFileContents = `
name: my config
disable-default-queries: true
queries:
- uses: ./foo
paths-ignore:
- a
- b
paths:
- c/d`;
fs.mkdirSync(path.join(tmpDir, 'foo'));
// And the config we expect it to parse to
const expectedConfig = {
languages: [languages_1.Language.javascript],
queries: { 'javascript': ['/foo/a.ql', '/bar/b.ql'] },
pathsIgnore: ['a', 'b'],
paths: ['c/d'],
originalUserInput: {
name: 'my config',
'disable-default-queries': true,
queries: [{ uses: './foo' }],
'paths-ignore': ['a', 'b'],
paths: ['c/d'],
},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: codeQL.getPath(),
};
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
setInput('languages', 'javascript');
const actualConfig = await configUtils.initConfig(tmpDir, tmpDir, codeQL);
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
});
ava_1.default("default queries are used", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
// Check that the default behaviour is to add the default queries.
// In this case a config file is specified but does not include
// the disable-default-queries field.
// We determine this by whether CodeQL.resolveQueries is called
// with the correct arguments.
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function (queries, extraSearchPath) {
resolveQueriesArgs.push({ queries, extraSearchPath });
return {
byLanguage: {
'javascript': {
'foo.ql': {},
},
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
},
});
// The important point of this config is that it doesn't specify
// the disable-default-queries field.
// Any other details are hopefully irrelevant for this test.
const inputFileContents = `
paths:
- foo`;
fs.mkdirSync(path.join(tmpDir, 'foo'));
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
setInput('languages', 'javascript');
await configUtils.initConfig(tmpDir, tmpDir, codeQL);
// Check resolve queries was called correctly
t.deepEqual(resolveQueriesArgs.length, 1);
t.deepEqual(resolveQueriesArgs[0].queries, ['javascript-code-scanning.qls']);
t.deepEqual(resolveQueriesArgs[0].extraSearchPath, undefined);
});
});
ava_1.default("API client used when reading remote config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function () {
return {
byLanguage: {
'javascript': {
'foo.ql': {},
},
},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
},
});
const inputFileContents = `
name: my config
disable-default-queries: true
queries:
- uses: ./
- uses: ./foo
@@ -269,17 +123,45 @@ ava_1.default("API client used when reading remote config", async (t) => {
paths-ignore:
- a
- b
paths:
- c/d`;
fs.mkdirSync(path.join(tmpDir, 'foo'));
// And the config we expect it to parse to
const expectedConfig = new configUtils.Config();
expectedConfig.name = 'my config';
expectedConfig.disableDefaultQueries = true;
expectedConfig.additionalQueries.push(fs.realpathSync(tmpDir));
expectedConfig.additionalQueries.push(fs.realpathSync(path.join(tmpDir, 'foo')));
expectedConfig.externalQueries = [new configUtils.ExternalQuery('foo/bar', 'dev')];
expectedConfig.pathsIgnore = ['a', 'b'];
expectedConfig.paths = ['c/d'];
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
setInput('config-file', 'input');
const actualConfig = await configUtils.loadConfig();
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
});
ava_1.default("API client used when reading remote config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const inputFileContents = `
name: my config
disable-default-queries: true
queries:
- uses: ./
paths-ignore:
- a
- b
paths:
- c/d`;
const dummyResponse = {
content: Buffer.from(inputFileContents).toString("base64"),
};
const spyGetContents = mockGetContents(dummyResponse);
// Create checkout directory for remote queries repository
fs.mkdirSync(path.join(tmpDir, 'foo/bar'), { recursive: true });
setInput('config-file', 'octo-org/codeql-config/config.yaml@main');
setInput('languages', 'javascript');
await configUtils.initConfig(tmpDir, tmpDir, codeQL);
await configUtils.loadConfig();
t.assert(spyGetContents.called);
});
});
@@ -292,8 +174,8 @@ ava_1.default("Remote config handles the case where a directory is provided", as
const repoReference = 'octo-org/codeql-config/config.yaml@main';
setInput('config-file', repoReference);
try {
await configUtils.initConfig(tmpDir, tmpDir, codeql_1.getCachedCodeQL());
throw new Error('initConfig did not throw error');
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDirectoryGivenMessage(repoReference)));
@@ -311,63 +193,25 @@ ava_1.default("Invalid format of remote config handled correctly", async (t) =>
const repoReference = 'octo-org/codeql-config/config.yaml@main';
setInput('config-file', repoReference);
try {
await configUtils.initConfig(tmpDir, tmpDir, codeql_1.getCachedCodeQL());
throw new Error('initConfig did not throw error');
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileFormatInvalidMessage(repoReference)));
}
});
});
ava_1.default("No detected languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
mockListLanguages([]);
try {
await configUtils.initConfig(tmpDir, tmpDir, codeql_1.getCachedCodeQL());
throw new Error('initConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getNoLanguagesError()));
}
});
});
ava_1.default("Unknown languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
setInput('languages', 'ruby,english');
try {
await configUtils.initConfig(tmpDir, tmpDir, codeql_1.getCachedCodeQL());
throw new Error('initConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getUnknownLanguagesError(['ruby', 'english'])));
}
});
});
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
ava_1.default("load invalid input - " + testName, async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env['RUNNER_TEMP'] = tmpDir;
process.env['GITHUB_WORKSPACE'] = tmpDir;
const codeQL = codeql_1.setCodeQL({
resolveQueries: async function () {
return {
byLanguage: {},
noDeclaredLanguage: {},
multipleDeclaredLanguages: {},
};
},
});
const inputFile = path.join(tmpDir, 'input');
fs.writeFileSync(inputFile, inputFileContents, 'utf8');
setInput('config-file', 'input');
setInput('languages', 'javascript');
try {
await configUtils.initConfig(tmpDir, tmpDir, codeQL);
throw new Error('initConfig did not throw error');
await configUtils.loadConfig();
throw new Error('loadConfig did not throw error');
}
catch (err) {
t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));

File diff suppressed because one or more lines are too long


@@ -1,3 +0,0 @@
{
"bundleVersion": "codeql-bundle-20200630"
}


@@ -11,22 +11,23 @@ const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
/**
* Check out repository at the given ref, and return the directory of the checkout.
*/
async function checkoutExternalRepository(repository, ref, tempDir) {
core.info('Checking out ' + repository);
const checkoutLocation = path.join(tempDir, repository);
if (!fs.existsSync(checkoutLocation)) {
const repoURL = 'https://github.com/' + repository + '.git';
await exec.exec('git', ['clone', repoURL, checkoutLocation]);
await exec.exec('git', [
'--work-tree=' + checkoutLocation,
'--git-dir=' + checkoutLocation + '/.git',
'checkout', ref,
]);
const util = __importStar(require("./util"));
async function checkoutExternalQueries(config) {
const folder = util.getRequiredEnvParam('RUNNER_TEMP');
for (const externalQuery of config.externalQueries) {
core.info('Checking out ' + externalQuery.repository);
const checkoutLocation = path.join(folder, externalQuery.repository);
if (!fs.existsSync(checkoutLocation)) {
const repoURL = 'https://github.com/' + externalQuery.repository + '.git';
await exec.exec('git', ['clone', repoURL, checkoutLocation]);
await exec.exec('git', [
'--work-tree=' + checkoutLocation,
'--git-dir=' + checkoutLocation + '/.git',
'checkout', externalQuery.ref,
]);
}
config.additionalQueries.push(path.join(checkoutLocation, externalQuery.path));
}
return checkoutLocation;
}
exports.checkoutExternalRepository = checkoutExternalRepository;
exports.checkoutExternalQueries = checkoutExternalQueries;
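// Editor's note (illustrative sketch, not part of this diff): for a hypothetical
// entry { repository: 'octo-org/queries', ref: 'main', path: 'cpp' } in
// config.externalQueries, the code above clones
// https://github.com/octo-org/queries.git into $RUNNER_TEMP/octo-org/queries,
// checks out 'main', and appends $RUNNER_TEMP/octo-org/queries/cpp to
// config.additionalQueries.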
//# sourceMappingURL=external-queries.js.map


@@ -1 +1 @@
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAE7B;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAAC,UAAkB,EAAE,GAAW,EAAE,OAAe;IAC/F,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,UAAU,CAAC,CAAC;IAExC,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,CAAC,CAAC;IACxD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,UAAU,GAAG,MAAM,CAAC;QAC5D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;QAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;YACrB,cAAc,GAAG,gBAAgB;YACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;YACzC,UAAU,EAAE,GAAG;SAChB,CAAC,CAAC;KACJ;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAfD,gEAeC"}
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAG7B,6CAA+B;AAExB,KAAK,UAAU,uBAAuB,CAAC,MAA0B;IACtE,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAC;IAEvD,KAAK,MAAM,aAAa,IAAI,MAAM,CAAC,eAAe,EAAE;QAClD,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,UAAU,CAAC,CAAC;QAEtD,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,aAAa,CAAC,UAAU,CAAC,CAAC;QACrE,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;YACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,aAAa,CAAC,UAAU,GAAG,MAAM,CAAC;YAC1E,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;YAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;gBACrB,cAAc,GAAG,gBAAgB;gBACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;gBACzC,UAAU,EAAE,aAAa,CAAC,GAAG;aAC9B,CAAC,CAAC;SACJ;QAED,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,gBAAgB,EAAE,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC;KAChF;AACH,CAAC;AAnBD,0DAmBC"}


@@ -13,14 +13,20 @@ Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const configUtils = __importStar(require("./config-utils"));
const externalQueries = __importStar(require("./external-queries"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default("checkoutExternalQueries", async (t) => {
let config = new configUtils.Config();
config.externalQueries = [
new configUtils.ExternalQuery("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b"),
];
await util.withTmpDir(async (tmpDir) => {
await externalQueries.checkoutExternalRepository("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b", tmpDir);
// COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in the default branch
process.env["RUNNER_TEMP"] = tmpDir;
await externalQueries.checkoutExternalQueries(config);
// COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in master
t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT")));
});
});


@@ -1 +1 @@
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,oEAAsD;AACtD,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACxC,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACnC,MAAM,eAAe,CAAC,0BAA0B,CAC9C,kBAAkB,EAClB,0CAA0C,EAC1C,MAAM,CAAC,CAAC;QAEV,mGAAmG;QACnG,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IAC/E,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,4DAA8C;AAC9C,oEAAsD;AACtD,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACxC,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,eAAe,GAAG;QACvB,IAAI,WAAW,CAAC,aAAa,CAAC,kBAAkB,EAAE,0CAA0C,CAAC;KAC9F,CAAC;IAEF,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACnC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC;QACpC,MAAM,eAAe,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtD,uFAAuF;QACvF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IAC/E,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

lib/finalize-db.js generated

@@ -8,106 +8,148 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const externalQueries = __importStar(require("./external-queries"));
const sharedEnv = __importStar(require("./shared-environment"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function sendStatusReport(startedAt, queriesStats, uploadStats, error) {
var _a, _b, _c;
const status = ((_a = queriesStats) === null || _a === void 0 ? void 0 : _a.analyze_failure_language) !== undefined || error !== undefined ? 'failure' : 'success';
const statusReportBase = await util.createStatusReportBase('finish', status, startedAt, (_b = error) === null || _b === void 0 ? void 0 : _b.message, (_c = error) === null || _c === void 0 ? void 0 : _c.stack);
const statusReport = {
...statusReportBase,
...(queriesStats || {}),
...(uploadStats || {}),
};
await util.sendStatusReport(statusReport);
/**
* A list of queries from https://github.com/github/codeql that
* we don't want to run. Disabling them here is a quicker alternative to
* disabling them in the code scanning query suites. Queries should also
* be disabled in the suites, and removed from this list here once the
* bundle is updated to make those suite changes live.
*
* Format is a map from language to an array of path suffixes of .ql files.
*/
const DISABLED_BUILTIN_QUERIES = {
'csharp': [
'ql/src/Security Features/CWE-937/VulnerablePackage.ql',
'ql/src/Security Features/CWE-451/MissingXFrameOptions.ql',
]
};
function queryIsDisabled(language, query) {
return (DISABLED_BUILTIN_QUERIES[language] || [])
.some(disabledQuery => query.endsWith(disabledQuery));
}
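// Editor's note (illustrative sketch, not part of this diff), using hypothetical paths:
//   queryIsDisabled('csharp', '/x/ql/src/Security Features/CWE-937/VulnerablePackage.ql')  // true (suffix matches)
//   queryIsDisabled('javascript', '/x/ql/src/SomeOtherQuery.ql')                           // false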
async function createdDBForScannedLanguages(databaseFolder, config) {
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
if (languages_1.isScannedLanguage(language)) {
async function createdDBForScannedLanguages(databaseFolder) {
const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
if (scannedLanguages) {
const codeql = codeql_1.getCodeQL();
for (const language of scannedLanguages.split(',')) {
core.startGroup('Extracting ' + language);
await codeql.extractScannedLanguage(path.join(databaseFolder, language), language);
core.endGroup();
}
}
}
async function finalizeDatabaseCreation(databaseFolder, config) {
await createdDBForScannedLanguages(databaseFolder, config);
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
async function finalizeDatabaseCreation(databaseFolder) {
await createdDBForScannedLanguages(databaseFolder);
const languages = process.env[sharedEnv.CODEQL_ACTION_LANGUAGES] || '';
const codeql = codeql_1.getCodeQL();
for (const language of languages.split(',')) {
core.startGroup('Finalizing ' + language);
await codeql.finalizeDatabase(path.join(databaseFolder, language));
core.endGroup();
}
}
// Runs queries and creates sarif files in the given folder
async function runQueries(databaseFolder, sarifFolder, config) {
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
for (let language of fs.readdirSync(databaseFolder)) {
core.startGroup('Analyzing ' + language);
const queries = config.queries[language] || [];
if (queries.length === 0) {
throw new Error('Unable to analyse ' + language + ' as no queries were selected for this language');
async function resolveQueryLanguages(config) {
let res = new Map();
const codeql = codeql_1.getCodeQL();
if (!config.disableDefaultQueries || config.additionalSuites.length !== 0) {
const suites = [];
for (const language of await util.getLanguages()) {
if (!config.disableDefaultQueries) {
suites.push(language + '-code-scanning.qls');
}
for (const additionalSuite of config.additionalSuites) {
suites.push(language + '-' + additionalSuite + '.qls');
}
}
try {
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuite = path.join(databaseFolder, language + '-queries.qls');
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
fs.writeFileSync(querySuite, querySuiteContents);
core.debug('Query suite file for ' + language + '...\n' + querySuiteContents);
const sarifFile = path.join(sarifFolder, language + '.sarif');
await codeql.databaseAnalyze(path.join(databaseFolder, language), sarifFile, querySuite);
core.debug('SARIF results for database ' + language + ' created at "' + sarifFile + '"');
core.endGroup();
}
catch (e) {
// For now the fields about query performance are not populated
return {
analyze_failure_language: language,
};
const resolveQueriesOutputObject = await codeql.resolveQueries(suites);
for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
if (res[language] === undefined) {
res[language] = [];
}
res[language].push(...Object.keys(queries).filter(q => !queryIsDisabled(language, q)));
}
}
return {};
if (config.additionalQueries.length !== 0) {
const resolveQueriesOutputObject = await codeql.resolveQueries(config.additionalQueries);
for (const [language, queries] of Object.entries(resolveQueriesOutputObject.byLanguage)) {
if (res[language] === undefined) {
res[language] = [];
}
res[language].push(...Object.keys(queries));
}
const noDeclaredLanguage = resolveQueriesOutputObject.noDeclaredLanguage;
const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
if (noDeclaredLanguageQueries.length !== 0) {
throw new Error('Some queries do not declare a language, their qlpack.yml file is missing or is invalid');
}
const multipleDeclaredLanguages = resolveQueriesOutputObject.multipleDeclaredLanguages;
const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
if (multipleDeclaredLanguagesQueries.length !== 0) {
throw new Error('Some queries declare multiple languages, their qlpack.yml file is missing or is invalid');
}
}
return res;
}
// Runs queries and creates sarif files in the given folder
async function runQueries(databaseFolder, sarifFolder, config) {
const queriesPerLanguage = await resolveQueryLanguages(config);
const codeql = codeql_1.getCodeQL();
for (let database of fs.readdirSync(databaseFolder)) {
core.startGroup('Analyzing ' + database);
const queries = queriesPerLanguage[database] || [];
if (queries.length === 0) {
throw new Error('Unable to analyse ' + database + ' as no queries were selected for this language');
}
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuite = path.join(databaseFolder, database + '-queries.qls');
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
fs.writeFileSync(querySuite, querySuiteContents);
core.debug('Query suite file for ' + database + '...\n' + querySuiteContents);
const sarifFile = path.join(sarifFolder, database + '.sarif');
await codeql.databaseAnalyze(path.join(databaseFolder, database), sarifFile, querySuite);
core.debug('SARIF results for database ' + database + ' created at "' + sarifFile + '"');
core.endGroup();
}
}
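// Editor's note (illustrative sketch, not part of this diff): for a database
// directory named 'javascript', the generated javascript-queries.qls is just a
// list of the resolved query paths, one per line (hypothetical paths here):
//
//   - query: /opt/codeql/qlpacks/javascript-queries/Security/CWE-079/Xss.ql
//   - query: /home/runner/work/repo/repo/my-queries/MyQuery.ql
//
// and that file is passed to codeql.databaseAnalyze together with the SARIF output path.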
async function run() {
const startedAt = new Date();
let queriesStats = undefined;
let uploadStats = undefined;
try {
util.prepareLocalRunEnvironment();
if (!await util.sendStatusReport(await util.createStatusReportBase('finish', 'starting', startedAt), true)) {
if (util.should_abort('finish', true) || !await util.reportActionStarting('finish')) {
return;
}
const config = await configUtils.getConfig(util.getRequiredEnvParam('RUNNER_TEMP'));
const config = await configUtils.loadConfig();
core.exportVariable(sharedEnv.ODASA_TRACER_CONFIGURATION, '');
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
const databaseFolder = util.getCodeQLDatabasesDir(config.tempDir);
const databaseFolder = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_DATABASE_DIR);
const sarifFolder = core.getInput('output');
fs.mkdirSync(sarifFolder, { recursive: true });
await io.mkdirP(sarifFolder);
core.info('Finalizing database creation');
await finalizeDatabaseCreation(databaseFolder, config);
await finalizeDatabaseCreation(databaseFolder);
await externalQueries.checkoutExternalQueries(config);
core.info('Analyzing database');
queriesStats = await runQueries(databaseFolder, sarifFolder, config);
await runQueries(databaseFolder, sarifFolder, config);
if ('true' === core.getInput('upload')) {
uploadStats = await upload_lib.upload(sarifFolder, repository_1.parseRepositoryNwo(util.getRequiredEnvParam('GITHUB_REPOSITORY')), await util.getCommitOid(), util.getRef(), await util.getAnalysisKey(), util.getRequiredEnvParam('GITHUB_WORKFLOW'), util.getWorkflowRunID(), core.getInput('checkout_path'), core.getInput('matrix'), core.getInput('token'), util.getRequiredEnvParam('GITHUB_API_URL'), 'actions', logging_1.getActionsLogger());
if (!await upload_lib.upload(sarifFolder)) {
await util.reportActionFailed('finish', 'upload');
return;
}
}
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await sendStatusReport(startedAt, queriesStats, uploadStats, error);
await util.reportActionFailed('finish', error.message, error.stack);
return;
}
await sendStatusReport(startedAt, queriesStats, uploadStats);
await util.reportActionSucceeded('finish');
}
run().catch(e => {
core.setFailed("analyze action failed: " + e);

File diff suppressed because one or more lines are too long

lib/fingerprints.js generated

@@ -10,6 +10,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const fs = __importStar(require("fs"));
const long_1 = __importDefault(require("long"));
const tab = '\t'.charCodeAt(0);
@@ -121,7 +122,7 @@ function hash(callback, input) {
exports.hash = hash;
// Generate a hash callback function that updates the given result in-place
// when it receives a hash for the correct line number. Ignores hashes for other lines.
function locationUpdateCallback(result, location, logger) {
function locationUpdateCallback(result, location) {
var _a, _b;
let locationStartLine = (_b = (_a = location.physicalLocation) === null || _a === void 0 ? void 0 : _a.region) === null || _b === void 0 ? void 0 : _b.startLine;
if (locationStartLine === undefined) {
@@ -145,10 +146,10 @@ function locationUpdateCallback(result, location, logger) {
result.partialFingerprints.primaryLocationLineHash = hash;
}
else if (existingFingerprint !== hash) {
logger.warning('Calculated fingerprint of ' + hash +
' for file ' + location.physicalLocation.artifactLocation.uri +
' line ' + lineNumber +
', but found existing inconsistent fingerprint value ' + existingFingerprint);
core.warning("Calculated fingerprint of " + hash +
" for file " + location.physicalLocation.artifactLocation.uri +
" line " + lineNumber +
", but found existing inconsistent fingerprint value " + existingFingerprint);
}
};
}
@@ -156,21 +157,21 @@ function locationUpdateCallback(result, location, logger) {
// the source file so we can hash it.
// If possible returns a absolute file path for the source file,
// or if not possible then returns undefined.
function resolveUriToFile(location, artifacts, logger) {
function resolveUriToFile(location, artifacts) {
// This may be referencing an artifact
if (!location.uri && location.index !== undefined) {
if (typeof location.index !== 'number' ||
location.index < 0 ||
location.index >= artifacts.length ||
typeof artifacts[location.index].location !== 'object') {
logger.debug(`Ignoring location as index "${location.index}" is invalid`);
core.debug('Ignoring location as index "' + location.index + '" is invalid');
return undefined;
}
location = artifacts[location.index].location;
}
// Get the URI and decode
if (typeof location.uri !== 'string') {
logger.debug(`Ignoring location as URI "${location.uri}" is invalid`);
core.debug('Ignoring location as uri "' + location.uri + '" is invalid');
return undefined;
}
let uri = decodeURIComponent(location.uri);
@@ -180,13 +181,13 @@ function resolveUriToFile(location, artifacts, logger) {
uri = uri.substring(fileUriPrefix.length);
}
if (uri.indexOf('://') !== -1) {
logger.debug(`Ignoring location URI "${uri}" as the scheme is not recognised`);
core.debug('Ignoring location URI "' + uri + "' as the scheme is not recognised");
return undefined;
}
// Discard any absolute paths that aren't in the src root
const srcRootPrefix = process.env['GITHUB_WORKSPACE'] + '/';
if (uri.startsWith('/') && !uri.startsWith(srcRootPrefix)) {
logger.debug(`Ignoring location URI "${uri}" as it is outside of the src root`);
core.debug('Ignoring location URI "' + uri + "' as it is outside of the src root");
return undefined;
}
// Just assume a relative path is relative to the src root.
@@ -197,7 +198,7 @@ function resolveUriToFile(location, artifacts, logger) {
}
// Check the file exists
if (!fs.existsSync(uri)) {
logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`);
core.debug("Unable to compute fingerprint for non-existent file: " + uri);
return undefined;
}
return uri;
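// Editor's note (illustrative sketch, not part of this diff): assuming
// GITHUB_WORKSPACE is /home/runner/work/repo/repo and the files exist on disk,
// resolveUriToFile would map
//   'file:///home/runner/work/repo/repo/src/a.js' -> '/home/runner/work/repo/repo/src/a.js'
//   'src/a.js'                                    -> '/home/runner/work/repo/repo/src/a.js'
//   '/etc/passwd'                                 -> undefined (absolute path outside the src root)
//   'https://example.com/a.js'                    -> undefined (scheme not recognised)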
@@ -205,8 +206,7 @@ function resolveUriToFile(location, artifacts, logger) {
exports.resolveUriToFile = resolveUriToFile;
// Compute fingerprints for results in the given sarif file
// and return an updated sarif file contents.
function addFingerprints(sarifContents, logger) {
var _a, _b;
function addFingerprints(sarifContents) {
let sarif = JSON.parse(sarifContents);
// Gather together results for the same file and construct
// callbacks to accept hashes for that file and update the location
@@ -217,18 +217,20 @@ function addFingerprints(sarifContents, logger) {
for (const result of run.results || []) {
// Check the primary location is defined correctly and is in the src root
const primaryLocation = (result.locations || [])[0];
if (!((_b = (_a = primaryLocation) === null || _a === void 0 ? void 0 : _a.physicalLocation) === null || _b === void 0 ? void 0 : _b.artifactLocation)) {
logger.debug(`Unable to compute fingerprint for invalid location: ${JSON.stringify(primaryLocation)}`);
if (!primaryLocation ||
!primaryLocation.physicalLocation ||
!primaryLocation.physicalLocation.artifactLocation) {
core.debug("Unable to compute fingerprint for invalid location: " + JSON.stringify(primaryLocation));
continue;
}
const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts, logger);
const filepath = resolveUriToFile(primaryLocation.physicalLocation.artifactLocation, artifacts);
if (!filepath) {
continue;
}
if (!callbacksByFile[filepath]) {
callbacksByFile[filepath] = [];
}
callbacksByFile[filepath].push(locationUpdateCallback(result, primaryLocation, logger));
callbacksByFile[filepath].push(locationUpdateCallback(result, primaryLocation));
}
}
// Now hash each file that was found

File diff suppressed because one or more lines are too long


@@ -14,7 +14,6 @@ const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const fingerprints = __importStar(require("./fingerprints"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
function testHash(t, input, expectedHashes) {
@@ -99,7 +98,7 @@ ava_1.default('hash', (t) => {
function testResolveUriToFile(uri, index, artifactsURIs) {
const location = { "uri": uri, "index": index };
const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
return fingerprints.resolveUriToFile(location, artifacts, logging_1.getCLILogger());
return fingerprints.resolveUriToFile(location, artifacts);
}
ava_1.default('resolveUriToFile', t => {
// The resolveUriToFile method checks that the file exists and is in the right directory
@@ -144,7 +143,7 @@ ava_1.default('addFingerprints', t => {
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input, logging_1.getCLILogger()), expected);
t.deepEqual(fingerprints.addFingerprints(input), expected);
});
ava_1.default('missingRegions', t => {
// Run an end-to-end test on a test file
@@ -155,6 +154,6 @@ ava_1.default('missingRegions', t => {
expected = JSON.stringify(JSON.parse(expected));
// The URIs in the SARIF files resolve to files in the testdata directory
process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
t.deepEqual(fingerprints.addFingerprints(input, logging_1.getCLILogger()), expected);
t.deepEqual(fingerprints.addFingerprints(input), expected);
});
//# sourceMappingURL=fingerprints.test.js.map

File diff suppressed because one or more lines are too long

lib/languages.js generated

@@ -1,43 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// All the languages supported by CodeQL
var Language;
(function (Language) {
Language["csharp"] = "csharp";
Language["cpp"] = "cpp";
Language["go"] = "go";
Language["java"] = "java";
Language["javascript"] = "javascript";
Language["python"] = "python";
})(Language = exports.Language || (exports.Language = {}));
// Additional names for languages
const LANGUAGE_ALIASES = {
'c': Language.cpp,
'c++': Language.cpp,
'c#': Language.csharp,
'typescript': Language.javascript,
};
// Translate from user input or GitHub's API names for languages to CodeQL's names for languages
function parseLanguage(language) {
// Normalise to lower case
language = language.toLowerCase();
// See if it's an exact match
if (language in Language) {
return language;
}
// Check language aliases
if (language in LANGUAGE_ALIASES) {
return LANGUAGE_ALIASES[language];
}
return undefined;
}
exports.parseLanguage = parseLanguage;
function isTracedLanguage(language) {
return ['cpp', 'java', 'csharp'].includes(language);
}
exports.isTracedLanguage = isTracedLanguage;
function isScannedLanguage(language) {
return !isTracedLanguage(language);
}
exports.isScannedLanguage = isScannedLanguage;
//# sourceMappingURL=languages.js.map


@@ -1 +0,0 @@
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;AAAA,wCAAwC;AACxC,IAAY,QAOX;AAPD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;AACnB,CAAC,EAPW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAOnB;AAED,iCAAiC;AACjC,MAAM,gBAAgB,GAA+B;IACnD,GAAG,EAAE,QAAQ,CAAC,GAAG;IACjB,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,YAAY,EAAE,QAAQ,CAAC,UAAU;CAClC,CAAC;AAEF,gGAAgG;AAChG,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;IAElC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,yBAAyB;IACzB,IAAI,QAAQ,IAAI,gBAAgB,EAAE;QAChC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,CAAC;KACnC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAfD,sCAeC;AAGD,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACtD,CAAC;AAFD,4CAEC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}

lib/languages.test.js generated

@@ -1,44 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const languages_1 = require("./languages");
const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
ava_1.default('parseLanguage', async (t) => {
// Exact matches
t.deepEqual(languages_1.parseLanguage('csharp'), languages_1.Language.csharp);
t.deepEqual(languages_1.parseLanguage('cpp'), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage('go'), languages_1.Language.go);
t.deepEqual(languages_1.parseLanguage('java'), languages_1.Language.java);
t.deepEqual(languages_1.parseLanguage('javascript'), languages_1.Language.javascript);
t.deepEqual(languages_1.parseLanguage('python'), languages_1.Language.python);
// Aliases
t.deepEqual(languages_1.parseLanguage('c'), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage('c++'), languages_1.Language.cpp);
t.deepEqual(languages_1.parseLanguage('c#'), languages_1.Language.csharp);
t.deepEqual(languages_1.parseLanguage('typescript'), languages_1.Language.javascript);
// Not matches
t.deepEqual(languages_1.parseLanguage('foo'), undefined);
t.deepEqual(languages_1.parseLanguage(' '), undefined);
t.deepEqual(languages_1.parseLanguage(''), undefined);
});
ava_1.default('isTracedLanguage', async (t) => {
t.true(languages_1.isTracedLanguage(languages_1.Language.cpp));
t.true(languages_1.isTracedLanguage(languages_1.Language.java));
t.true(languages_1.isTracedLanguage(languages_1.Language.csharp));
t.false(languages_1.isTracedLanguage(languages_1.Language.go));
t.false(languages_1.isTracedLanguage(languages_1.Language.javascript));
t.false(languages_1.isTracedLanguage(languages_1.Language.python));
});
ava_1.default('isScannedLanguage', async (t) => {
t.false(languages_1.isScannedLanguage(languages_1.Language.cpp));
t.false(languages_1.isScannedLanguage(languages_1.Language.java));
t.false(languages_1.isScannedLanguage(languages_1.Language.csharp));
t.true(languages_1.isScannedLanguage(languages_1.Language.go));
t.true(languages_1.isScannedLanguage(languages_1.Language.javascript));
t.true(languages_1.isScannedLanguage(languages_1.Language.python));
});
//# sourceMappingURL=languages.test.js.map


@@ -1 +0,0 @@
{"version":3,"file":"languages.test.js","sourceRoot":"","sources":["../src/languages.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,2CAAyF;AACzF,mDAA2C;AAE3C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,gBAAgB;IAChB,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IACtD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAChD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,IAAI,CAAC,EAAE,oBAAQ,CAAC,EAAE,CAAC,CAAC;IAC9C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,MAAM,CAAC,EAAE,oBAAQ,CAAC,IAAI,CAAC,CAAC;IAClD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,YAAY,CAAC,EAAE,oBAAQ,CAAC,UAAU,CAAC,CAAC;IAC9D,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,QAAQ,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAEtD,UAAU;IACV,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,GAAG,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAC9C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,oBAAQ,CAAC,GAAG,CAAC,CAAC;IAChD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,IAAI,CAAC,EAAE,oBAAQ,CAAC,MAAM,CAAC,CAAC;IAClD,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,YAAY,CAAC,EAAE,oBAAQ,CAAC,UAAU,CAAC,CAAC;IAE9D,cAAc;IACd,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,KAAK,CAAC,EAAE,SAAS,CAAC,CAAC;IAC7C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,GAAG,CAAC,EAAE,SAAS,CAAC,CAAC;IAC3C,CAAC,CAAC,SAAS,CAAC,yBAAa,CAAC,EAAE,CAAC,EAAE,SAAS,CAAC,CAAC;AAC5C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACjC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IACxC,CAAC,CAAC,IAAI,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAE1C,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,KAAK,CAAC,4BAAgB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mBAAmB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAClC,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,GAAG,CAAC,CAAC,CAAC;IACzC,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC,CAAC,KAAK,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAE5C,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC,CAAC;IACvC,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC/C,CAAC,CAAC,IAAI,CAAC,6BAAiB,CAAC,oBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7C,CAAC,CAAC,CAAC"}

26
lib/logging.js generated

@@ -1,26 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
function getActionsLogger() {
return core;
}
exports.getActionsLogger = getActionsLogger;
function getCLILogger() {
return {
debug: console.debug,
info: console.info,
warning: console.warn,
error: console.error,
startGroup: () => undefined,
endGroup: () => undefined,
};
}
exports.getCLILogger = getCLILogger;
//# sourceMappingURL=logging.js.map


@@ -1 +0,0 @@
{"version":3,"file":"logging.js","sourceRoot":"","sources":["../src/logging.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAYtC,SAAgB,gBAAgB;IAC9B,OAAO,IAAI,CAAC;AACd,CAAC;AAFD,4CAEC;AAED,SAAgB,YAAY;IAC1B,OAAO;QACL,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,OAAO,EAAE,OAAO,CAAC,IAAI;QACrB,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AATD,oCASC"}

14
lib/repository.js generated

@@ -1,14 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function parseRepositoryNwo(input) {
const parts = input.split('/');
if (parts.length !== 2) {
throw new Error(`"${input}" is not a valid repository name`);
}
return {
owner: parts[0],
repo: parts[1],
};
}
exports.parseRepositoryNwo = parseRepositoryNwo;
//# sourceMappingURL=repository.js.map


@@ -1 +0,0 @@
{"version":3,"file":"repository.js","sourceRoot":"","sources":["../src/repository.ts"],"names":[],"mappings":";;AAMA,SAAgB,kBAAkB,CAAC,KAAa;IAC9C,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QACtB,MAAM,IAAI,KAAK,CAAC,IAAI,KAAK,kCAAkC,CAAC,CAAC;KAC9D;IACD,OAAO;QACL,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC;QACf,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC;KACf,CAAC;AACJ,CAAC;AATD,gDASC"}

103
lib/setup-tracer.js generated

@@ -9,12 +9,13 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const io = __importStar(require("@actions/io"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const languages_1 = require("./languages");
const sharedEnv = __importStar(require("./shared-environment"));
const util = __importStar(require("./util"));
const CRITICAL_TRACER_VARS = new Set(['SEMMLE_PRELOAD_libtrace',
,
@@ -50,14 +51,14 @@ async function tracerConfig(codeql, database, compilerSpec) {
}
return info;
}
function concatTracerConfigs(tracerConfigs, config) {
function concatTracerConfigs(configs) {
// A tracer config is a map containing additional environment variables and a tracer 'spec' file.
// A tracer 'spec' file has the following format [log_file, number_of_blocks, blocks_text]
// Merge the environments
const env = {};
let copyExecutables = false;
let envSize = 0;
for (const v of tracerConfigs) {
for (let v of Object.values(configs)) {
for (let e of Object.entries(v.env)) {
const name = e[0];
const value = e[1];
@@ -78,7 +79,7 @@ function concatTracerConfigs(tracerConfigs, config) {
}
}
// Concatenate spec files into a new spec file
let languages = Object.keys(tracerConfigs);
let languages = Object.keys(configs);
const cppIndex = languages.indexOf('cpp');
// Make sure cpp is the last language, if it's present since it must be concatenated last
if (cppIndex !== -1) {
@@ -89,14 +90,15 @@ function concatTracerConfigs(tracerConfigs, config) {
let totalLines = [];
let totalCount = 0;
for (let lang of languages) {
const lines = fs.readFileSync(tracerConfigs[lang].spec, 'utf8').split(/\r?\n/);
const lines = fs.readFileSync(configs[lang].spec, 'utf8').split(/\r?\n/);
const count = parseInt(lines[1], 10);
totalCount += count;
totalLines.push(...lines.slice(2));
}
const newLogFilePath = path.resolve(config.tempDir, 'compound-build-tracer.log');
const spec = path.resolve(config.tempDir, 'compound-spec');
const compoundTempFolder = path.resolve(config.tempDir, 'compound-temp');
const tempFolder = util.getRequiredEnvParam('RUNNER_TEMP');
const newLogFilePath = path.resolve(tempFolder, 'compound-build-tracer.log');
const spec = path.resolve(tempFolder, 'compound-spec');
const compoundTempFolder = path.resolve(tempFolder, 'compound-temp');
const newSpecContent = [newLogFilePath, totalCount.toString(10), ...totalLines];
if (copyExecutables) {
env['SEMMLE_COPY_EXECUTABLES_ROOT'] = compoundTempFolder;
@@ -119,51 +121,34 @@ function concatTracerConfigs(tracerConfigs, config) {
fs.writeFileSync(envPath, buffer);
return { env, spec };
}
async function sendSuccessStatusReport(startedAt, config) {
const statusReportBase = await util.createStatusReportBase('init', 'success', startedAt);
const languages = config.languages.join(',');
const workflowLanguages = core.getInput('languages', { required: false });
const paths = (config.originalUserInput.paths || []).join(',');
const pathsIgnore = (config.originalUserInput['paths-ignore'] || []).join(',');
const disableDefaultQueries = config.originalUserInput['disable-default-queries'] ? languages : '';
const queries = (config.originalUserInput.queries || []).map(q => q.uses).join(',');
const statusReport = {
...statusReportBase,
languages: languages,
workflow_languages: workflowLanguages,
paths: paths,
paths_ignore: pathsIgnore,
disable_default_queries: disableDefaultQueries,
queries: queries,
};
await util.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
let config;
let codeql;
let languages;
try {
util.prepareLocalRunEnvironment();
if (!await util.sendStatusReport(await util.createStatusReportBase('init', 'starting', startedAt), true)) {
if (util.should_abort('init', false) || !await util.reportActionStarting('init')) {
return;
}
core.startGroup('Setup CodeQL tools');
codeql = await codeql_1.setupCodeQL();
await codeql.printVersion();
core.endGroup();
core.startGroup('Load language configuration');
config = await configUtils.initConfig(util.getRequiredEnvParam('RUNNER_TEMP'), util.getRequiredEnvParam('RUNNER_TOOL_CACHE'), codeql);
analysisPaths.includeAndExcludeAnalysisPaths(config);
const config = await configUtils.loadConfig();
languages = await util.getLanguages();
// If the languages parameter was not given and no languages were
// detected then fail here as this is a workflow configuration error.
if (languages.length === 0) {
throw new Error("Did not detect any languages to analyze. Please update input in workflow.");
}
analysisPaths.includeAndExcludeAnalysisPaths(config, languages);
core.endGroup();
}
catch (e) {
core.setFailed(e.message);
console.log(e);
await util.sendStatusReport(await util.createStatusReportBase('init', 'aborted', startedAt, e.message));
await util.reportActionAborted('init', e.message);
return;
}
try {
const sourceRoot = path.resolve();
core.startGroup('Setup CodeQL tools');
const codeql = await codeql_1.setupCodeQL();
await codeql.printVersion();
core.endGroup();
// Forward Go flags
const goFlags = process.env['GOFLAGS'];
if (goFlags) {
@@ -173,50 +158,58 @@ async function run() {
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
const codeqlRam = process.env['CODEQL_RAM'] || '6500';
core.exportVariable('CODEQL_RAM', codeqlRam);
const databaseFolder = util.getCodeQLDatabasesDir(config.tempDir);
fs.mkdirSync(databaseFolder, { recursive: true });
let tracedLanguageConfigs = [];
const databaseFolder = path.resolve(util.getRequiredEnvParam('RUNNER_TEMP'), 'codeql_databases');
await io.mkdirP(databaseFolder);
let tracedLanguages = {};
let scannedLanguages = [];
// TODO: replace this code once CodeQL supports multi-language tracing
for (let language of config.languages) {
for (let language of languages) {
const languageDatabase = path.join(databaseFolder, language);
// Init language database
await codeql.databaseInit(languageDatabase, language, sourceRoot);
// TODO: add better detection of 'traced languages' instead of using a hard coded list
if (languages_1.isTracedLanguage(language)) {
if (['cpp', 'java', 'csharp'].includes(language)) {
const config = await tracerConfig(codeql, languageDatabase);
tracedLanguageConfigs.push(config);
tracedLanguages[language] = config;
}
else {
scannedLanguages.push(language);
}
}
if (tracedLanguageConfigs.length > 0) {
const mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
const tracedLanguageKeys = Object.keys(tracedLanguages);
if (tracedLanguageKeys.length > 0) {
const mainTracerConfig = concatTracerConfigs(tracedLanguages);
if (mainTracerConfig.spec) {
for (let entry of Object.entries(mainTracerConfig.env)) {
core.exportVariable(entry[0], entry[1]);
}
core.exportVariable('ODASA_TRACER_CONFIGURATION', mainTracerConfig.spec);
const codeQLDir = path.dirname(codeql.getPath());
if (process.platform === 'darwin') {
core.exportVariable('DYLD_INSERT_LIBRARIES', path.join(codeQLDir, 'tools', 'osx64', 'libtrace.dylib'));
core.exportVariable('DYLD_INSERT_LIBRARIES', path.join(codeql.getDir(), 'tools', 'osx64', 'libtrace.dylib'));
}
else if (process.platform === 'win32') {
await exec.exec('powershell', [
path.resolve(__dirname, '..', 'src', 'inject-tracer.ps1'),
path.resolve(codeQLDir, 'tools', 'win64', 'tracer.exe'),
path.resolve(codeql.getDir(), 'tools', 'win64', 'tracer.exe'),
], { env: { 'ODASA_TRACER_CONFIGURATION': mainTracerConfig.spec } });
}
else {
core.exportVariable('LD_PRELOAD', path.join(codeQLDir, 'tools', 'linux64', '${LIB}trace.so'));
core.exportVariable('LD_PRELOAD', path.join(codeql.getDir(), 'tools', 'linux64', '${LIB}trace.so'));
}
}
}
core.exportVariable(sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES, scannedLanguages.join(','));
core.exportVariable(sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES, tracedLanguageKeys.join(','));
// TODO: make this a "private" environment variable of the action
core.exportVariable(sharedEnv.CODEQL_ACTION_DATABASE_DIR, databaseFolder);
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await util.sendStatusReport(await util.createStatusReportBase('init', 'failure', startedAt, error.message, error.stack));
await util.reportActionFailed('init', error.message, error.stack);
return;
}
await sendSuccessStatusReport(startedAt, config);
await util.reportActionSucceeded('init');
core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
}
run().catch(e => {
core.setFailed("init action failed: " + e);

File diff suppressed because one or more lines are too long
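For context on the concatTracerConfigs changes above: a tracer 'spec' file is plain text of the form [log_file, number_of_blocks, blocks_text], and the compound spec is built by summing the per-language block counts and concatenating their blocks. Below is a stripped-down sketch of just that merge, ignoring the environment merging, executable copying and cpp-last ordering handled by the real code; mergeSpecs is an invented name.

const fs = require('fs');
const path = require('path');

// Merge several per-language tracer spec files into one compound spec file.
// Each input spec: line 0 = log file path, line 1 = block count, remaining lines = blocks.
function mergeSpecs(specPaths, tempDir) {
    let totalCount = 0;
    const totalLines = [];
    for (const specPath of specPaths) {
        const lines = fs.readFileSync(specPath, 'utf8').split(/\r?\n/);
        totalCount += parseInt(lines[1], 10);
        totalLines.push(...lines.slice(2));
    }
    const newLogFilePath = path.resolve(tempDir, 'compound-build-tracer.log');
    const spec = path.resolve(tempDir, 'compound-spec');
    fs.writeFileSync(spec, [newLogFilePath, totalCount.toString(10), ...totalLines].join('\n'));
    return spec;
}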


@@ -1,10 +1,17 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CODEQL_ACTION_DATABASE_DIR = 'CODEQL_ACTION_DATABASE_DIR';
exports.CODEQL_ACTION_LANGUAGES = 'CODEQL_ACTION_LANGUAGES';
exports.CODEQL_ACTION_ANALYSIS_KEY = 'CODEQL_ACTION_ANALYSIS_KEY';
exports.ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
exports.CODEQL_ACTION_SCANNED_LANGUAGES = 'CODEQL_ACTION_SCANNED_LANGUAGES';
exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
// The time at which the first action (normally init) started executing.
// If a workflow invokes a different action without first invoking the init
// action (i.e. the upload action is being used by a third-party integrator)
// then this variable will be assigned the start time of the action invoked
// rather than the init action.
exports.CODEQL_WORKFLOW_STARTED_AT = 'CODEQL_WORKFLOW_STARTED_AT';
exports.CODEQL_ACTION_STARTED_AT = 'CODEQL_ACTION_STARTED_AT';
// Populated when the init action completes successfully
exports.CODEQL_ACTION_INIT_COMPLETED = 'CODEQL_ACTION_INIT_COMPLETED';
//# sourceMappingURL=shared-environment.js.map


@@ -1 +1 @@
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AACvE,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,0BAA0B,GAAG,4BAA4B,CAAC"}
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,uBAAuB,GAAG,yBAAyB,CAAC;AACpD,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,+BAA+B,GAAG,iCAAiC,CAAC;AACpE,QAAA,8BAA8B,GAAG,gCAAgC,CAAC;AAC/E,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,wBAAwB,GAAG,0BAA0B,CAAC;AACnE,wDAAwD;AAC3C,QAAA,4BAA4B,GAAG,8BAA8B,CAAC"}

27
lib/testing-utils.js generated

@@ -2,16 +2,8 @@
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const sinon_1 = __importDefault(require("sinon"));
const CodeQL = __importStar(require("./codeql"));
function wrapOutput(context) {
// Function signature taken from Socket.write.
// Note there are two overloads:
@@ -40,10 +32,6 @@ function wrapOutput(context) {
function setupTests(test) {
const typedTest = test;
typedTest.beforeEach(t => {
// Set an empty CodeQL object so that all method calls will fail
// unless the test explicitly sets one up.
CodeQL.setCodeQL({});
// Replace stdout and stderr so we can record output during tests
t.context.testOutput = "";
const processStdoutWrite = process.stdout.write.bind(process.stdout);
t.context.stdoutWrite = processStdoutWrite;
@@ -51,27 +39,16 @@ function setupTests(test) {
const processStderrWrite = process.stderr.write.bind(process.stderr);
t.context.stderrWrite = processStderrWrite;
process.stderr.write = wrapOutput(t.context);
// Many tests modify environment variables. Take a copy now so that
// we reset them after the test to keep tests independent of each other.
// process.env only has string fields, so a shallow copy is fine.
t.context.env = {};
Object.assign(t.context.env, process.env);
// Any test that runs code that expects to only be run on actions
// will depend on various environment variables.
process.env['GITHUB_API_URL'] = 'https://github.localhost/api/v3';
});
typedTest.afterEach.always(t => {
// Restore stdout and stderr
// The captured output is only replayed if the test failed
process.stdout.write = t.context.stdoutWrite;
process.stderr.write = t.context.stderrWrite;
if (!t.passed) {
process.stdout.write(t.context.testOutput);
}
// Undo any modifications made by sinon
});
typedTest.afterEach.always(() => {
sinon_1.default.restore();
// Undo any modifications to the env
process.env = t.context.env;
});
}
exports.setupTests = setupTests;


@@ -1 +1 @@
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AACA,kDAA0B;AAE1B,iDAAmC;AAInC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CAAC,KAA0B,EAAE,QAAiB,EAAE,EAA0B,EAAW,EAAE;QAC5F,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACvB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;QAE1C,iEAAiE;QACjE,gDAAgD;QAChD,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,iCAAiC,CAAC;IACpE,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;QAC7B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,eAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AA3CD,gCA2CC"}
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;AACA,kDAA0B;AAI1B,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CAAC,KAA0B,EAAE,QAAiB,EAAE,EAA0B,EAAW,EAAE;QAC5F,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE;QACvB,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAE1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;IACtD,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;QAC7B,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAE7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;IACH,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,GAAG,EAAE;QAC9B,eAAK,CAAC,OAAO,EAAE,CAAC;IAClB,CAAC,CAAC,CAAC;AACL,CAAC;AA3BD,gCA2BC"}

182
lib/upload-lib.js generated

@@ -43,43 +43,41 @@ function combineSarifFiles(sarifFiles) {
exports.combineSarifFiles = combineSarifFiles;
// Upload the given payload.
// If the request fails then this will retry a small number of times.
async function uploadPayload(payload, repositoryNwo, githubAuth, githubApiUrl, mode, logger) {
logger.info('Uploading results');
async function uploadPayload(payload) {
core.info('Uploading results');
// If in test mode we don't want to upload the results
const testMode = process.env['TEST_MODE'] === 'true' || false;
if (testMode) {
return;
return true;
}
const [owner, repo] = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
// Make up to 4 attempts to upload, and sleep for these
// numbers of seconds between each attempt.
// We don't want to backoff too much to avoid wasting action
// minutes, but just waiting a little bit could maybe help.
const backoffPeriods = [1, 5, 15];
const client = api.getApiClient(githubAuth, githubApiUrl);
for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
const reqURL = mode === 'actions'
? 'PUT /repos/:owner/:repo/code-scanning/analysis'
: 'POST /repos/:owner/:repo/code-scanning/sarifs';
const response = await client.request(reqURL, ({
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
const response = await api.getApiClient().request("PUT /repos/:owner/:repo/code-scanning/analysis", ({
owner: owner,
repo: repo,
data: payload,
}));
logger.debug('response status: ' + response.status);
core.debug('response status: ' + response.status);
const statusCode = response.status;
if (statusCode === 202) {
logger.info("Successfully uploaded results");
return;
core.info("Successfully uploaded results");
return true;
}
const requestID = response.headers["x-github-request-id"];
// On any other status code that's not 5xx mark the upload as failed
if (!statusCode || statusCode < 500 || statusCode >= 600) {
throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
core.setFailed('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
return false;
}
// On a 5xx status code we may retry the request
if (attempt < backoffPeriods.length) {
// Log the failure as a warning but don't mark the action as failed yet
logger.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
core.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
') failed (' + requestID + '). Retrying in ' + backoffPeriods[attempt] +
' seconds: (' + statusCode + ') ' + JSON.stringify(response.data));
// Sleep for the backoff period
@@ -90,37 +88,29 @@ async function uploadPayload(payload, repositoryNwo, githubAuth, githubApiUrl, m
// If the upload fails with 5xx then we assume it is a temporary problem
// and not an error that the user has caused or can fix.
// We avoid marking the job as failed to avoid breaking CI workflows.
throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
core.error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
return false;
}
console.log("X-RateLimit-Limit = " + response.headers["X-RateLimit-Limit"]);
console.log("X-RateLimit-Remaining = " + response.headers["X-RateLimit-Remaining"]);
throw new Error('Abort here for testing purposes...');
}
// This case shouldn't ever happen as the final iteration of the loop
// will always throw an error instead of exiting to here.
throw new Error('Upload failed');
return false;
}
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
async function upload(sarifPath, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubApiUrl, mode, logger) {
const sarifFiles = [];
if (!fs.existsSync(sarifPath)) {
throw new Error(`Path does not exist: ${sarifPath}`);
}
if (fs.lstatSync(sarifPath).isDirectory()) {
fs.readdirSync(sarifPath)
async function upload(input) {
if (fs.lstatSync(input).isDirectory()) {
const sarifFiles = fs.readdirSync(input)
.filter(f => f.endsWith(".sarif"))
.map(f => path.resolve(sarifPath, f))
.forEach(f => sarifFiles.push(f));
.map(f => path.resolve(input, f));
if (sarifFiles.length === 0) {
throw new Error("No SARIF files found to upload in \"" + sarifPath + "\".");
core.setFailed("No SARIF files found to upload in \"" + input + "\".");
return false;
}
return await uploadFiles(sarifFiles);
}
else {
sarifFiles.push(sarifPath);
return await uploadFiles([input]);
}
return await uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubApiUrl, mode, logger);
}
exports.upload = upload;
// Counts the number of results in the given SARIF file
@@ -133,83 +123,87 @@ function countResultsInSarif(sarif) {
}
exports.countResultsInSarif = countResultsInSarif;
// Validates that the given file path refers to a valid SARIF file.
// Throws an error if the file is invalid.
function validateSarifFileSchema(sarifFilePath, logger) {
// Returns a non-empty list of error messages if the file is invalid,
// otherwise returns the empty list if the file is valid.
function validateSarifFileSchema(sarifFilePath) {
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
const schema = require('../src/sarif_v2.1.0_schema.json');
const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));
const result = new jsonschema.Validator().validate(sarif, schema);
if (!result.valid) {
// Output the more verbose error messages in groups as these may be very large.
for (const error of result.errors) {
logger.startGroup("Error details: " + error.stack);
logger.info(JSON.stringify(error, null, 2));
logger.endGroup();
}
// Set the main error message to the stacks of all the errors.
if (result.valid) {
return true;
}
else {
// Set the failure message to the stacks of all the errors.
// This should be of a manageable size and may even give enough to fix the error.
const sarifErrors = result.errors.map(e => "- " + e.stack);
throw new Error("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + sarifErrors.join("\n"));
const errorMessages = result.errors.map(e => "- " + e.stack);
core.setFailed("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + errorMessages.join("\n"));
// Also output the more verbose error messages in groups as these may be very large.
for (const error of result.errors) {
core.startGroup("Error details: " + error.stack);
core.info(JSON.stringify(error, null, 2));
core.endGroup();
}
return false;
}
}
exports.validateSarifFileSchema = validateSarifFileSchema;
// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubApiUrl, mode, logger) {
logger.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
if (mode === 'actions') {
// This check only works on actions as env vars don't persist between calls to the CLI
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
async function uploadFiles(sarifFiles) {
core.startGroup("Uploading results");
core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
core.error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
return false;
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
validateSarifFileSchema(file, logger);
if (!validateSarifFileSchema(file)) {
return false;
}
}
const commitOid = await util.getCommitOid();
const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
const ref = util.getRef();
const analysisKey = await util.getAnalysisKey();
const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = fingerprints.addFingerprints(sarifPayload, logger);
sarifPayload = fingerprints.addFingerprints(sarifPayload);
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
let checkoutPath = core.getInput('checkout_path');
let checkoutURI = file_url_1.default(checkoutPath);
const workflowRunID = parseInt(workflowRunIDStr, 10);
if (Number.isNaN(workflowRunID)) {
core.setFailed('GITHUB_RUN_ID must define a non NaN workflow run ID');
return false;
}
let matrix = core.getInput('matrix');
if (matrix === "null" || matrix === "") {
matrix = undefined;
}
const toolNames = util.getToolNames(sarifPayload);
let payload;
if (mode === 'actions') {
payload = JSON.stringify({
"commit_oid": commitOid,
"ref": ref,
"analysis_key": analysisKey,
"analysis_name": analysisName,
"sarif": zipped_sarif,
"workflow_run_id": workflowRunID,
"checkout_uri": checkoutURI,
"environment": environment,
"started_at": process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT],
"tool_names": toolNames,
});
}
else {
payload = JSON.stringify({
"commit_sha": commitOid,
"ref": ref,
"sarif": zipped_sarif,
"checkout_uri": checkoutURI,
"tool_name": toolNames[0],
});
}
const payload = JSON.stringify({
"commit_oid": commitOid,
"ref": ref,
"analysis_key": analysisKey,
"analysis_name": analysisName,
"sarif": zipped_sarif,
"workflow_run_id": workflowRunID,
"checkout_uri": checkoutURI,
"environment": matrix,
"started_at": startedAt,
"tool_names": toolNames,
});
// Log some useful debug info about the upload
const rawUploadSizeBytes = sarifPayload.length;
logger.debug("Raw upload size: " + rawUploadSizeBytes + " bytes");
const zippedUploadSizeBytes = zipped_sarif.length;
logger.debug("Base64 zipped upload size: " + zippedUploadSizeBytes + " bytes");
const numResultInSarif = countResultsInSarif(sarifPayload);
logger.debug("Number of results in upload: " + numResultInSarif);
core.debug("Raw upload size: " + sarifPayload.length + " bytes");
core.debug("Base64 zipped upload size: " + zipped_sarif.length + " bytes");
core.debug("Number of results in upload: " + countResultsInSarif(sarifPayload));
// Make the upload
await uploadPayload(payload, repositoryNwo, githubAuth, githubApiUrl, mode, logger);
return {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
};
const succeeded = await uploadPayload(payload);
core.endGroup();
return succeeded;
}
//# sourceMappingURL=upload-lib.js.map

File diff suppressed because one or more lines are too long
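The retry policy visible in uploadPayload above can be summarised as: up to four attempts, sleeping 1, 5 and 15 seconds between them, retrying only on 5xx responses and treating 202 as success. A condensed sketch of just that loop follows; uploadWithRetry and sendOnce are invented names, and the real function also builds the request and handles the test-mode short-circuit.

// Condensed sketch of the retry policy used when uploading the SARIF payload.
async function uploadWithRetry(sendOnce) {
    const backoffPeriods = [1, 5, 15]; // seconds to wait between the four attempts
    for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
        const response = await sendOnce();
        if (response.status === 202) {
            return; // upload accepted
        }
        const retryable = response.status >= 500 && response.status < 600;
        if (!retryable || attempt === backoffPeriods.length) {
            throw new Error(`Upload failed: (${response.status})`);
        }
        await new Promise(resolve => setTimeout(resolve, backoffPeriods[attempt] * 1000));
    }
}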


@@ -11,16 +11,17 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const uploadLib = __importStar(require("./upload-lib"));
testing_utils_1.setupTests(ava_1.default);
ava_1.default('validateSarifFileSchema - valid', t => {
const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
t.notThrows(() => uploadLib.validateSarifFileSchema(inputFile, logging_1.getCLILogger()));
t.true(uploadLib.validateSarifFileSchema(inputFile));
});
ava_1.default('validateSarifFileSchema - invalid', t => {
const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
t.throws(() => uploadLib.validateSarifFileSchema(inputFile, logging_1.getCLILogger()));
t.false(uploadLib.validateSarifFileSchema(inputFile));
// validateSarifFileSchema calls core.setFailed which sets the exit code on error
process.exitCode = 0;
});
//# sourceMappingURL=upload-lib.test.js.map


@@ -1 +1 @@
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,uCAAyC;AACzC,mDAA2C;AAC3C,wDAA0C;AAE1C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAC1C,MAAM,SAAS,GAAG,SAAS,GAAG,oCAAoC,CAAC;IACnE,CAAC,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,sBAAY,EAAE,CAAC,CAAC,CAAC;AAClF,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE;IAC5C,MAAM,SAAS,GAAG,SAAS,GAAG,sCAAsC,CAAC;IACrE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,sBAAY,EAAE,CAAC,CAAC,CAAC;AAC/E,CAAC,CAAC,CAAC"}
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,mDAA2C;AAC3C,wDAA0C;AAE1C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAC1C,MAAM,SAAS,GAAG,SAAS,GAAG,oCAAoC,CAAC;IACnE,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE;IAC5C,MAAM,SAAS,GAAG,SAAS,GAAG,sCAAsC,CAAC;IACrE,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;IACtD,iFAAiF;IACjF,OAAO,CAAC,QAAQ,GAAG,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC"}

24
lib/upload-sarif.js generated

@@ -8,31 +8,23 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
async function sendSuccessStatusReport(startedAt, uploadStats) {
const statusReportBase = await util.createStatusReportBase('upload-sarif', 'success', startedAt);
const statusReport = {
...statusReportBase,
...uploadStats,
};
await util.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
if (!await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'starting', startedAt), true)) {
if (util.should_abort('upload-sarif', false) || !await util.reportActionStarting('upload-sarif')) {
return;
}
try {
const uploadStats = await upload_lib.upload(core.getInput('sarif_file'), repository_1.parseRepositoryNwo(util.getRequiredEnvParam('GITHUB_REPOSITORY')), await util.getCommitOid(), util.getRef(), await util.getAnalysisKey(), util.getRequiredEnvParam('GITHUB_WORKFLOW'), util.getWorkflowRunID(), core.getInput('checkout_path'), core.getInput('matrix'), core.getInput('token'), util.getRequiredEnvParam('GITHUB_API_URL'), 'actions', logging_1.getActionsLogger());
await sendSuccessStatusReport(startedAt, uploadStats);
if (await upload_lib.upload(core.getInput('sarif_file'))) {
await util.reportActionSucceeded('upload-sarif');
}
else {
await util.reportActionFailed('upload-sarif', 'upload');
}
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'failure', startedAt, error.message, error.stack));
await util.reportActionFailed('upload-sarif', error.message, error.stack);
return;
}
}


@@ -1 +1 @@
{"version":3,"file":"upload-sarif.js","sourceRoot":"","sources":["../src/upload-sarif.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAI/B,KAAK,UAAU,uBAAuB,CAAC,SAAe,EAAE,WAA0C;IAChG,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,cAAc,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;IACjG,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAI,WAAW;KAChB,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,cAAc,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;QAChH,OAAO;KACR;IAED,IAAI;QACF,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CACzC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,EAC3B,+BAAkB,CAAC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACjE,MAAM,IAAI,CAAC,YAAY,EAAE,EACzB,IAAI,CAAC,MAAM,EAAE,EACb,MAAM,IAAI,CAAC,cAAc,EAAE,EAC3B,IAAI,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAC3C,IAAI,CAAC,gBAAgB,EAAE,EACvB,IAAI,CAAC,QAAQ,CAAC,eAAe,CAAC,EAC9B,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EACvB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EACtB,IAAI,CAAC,mBAAmB,CAAC,gBAAgB,CAAC,EAC1C,SAAS,EACT,0BAAgB,EAAE,CAAC,CAAC;QACtB,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KAEvD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAC3D,cAAc,EACd,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;QAChB,OAAO;KACR;AACH,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,qCAAqC,GAAG,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
{"version":3,"file":"upload-sarif.js","sourceRoot":"","sources":["../src/upload-sarif.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,yDAA2C;AAC3C,6CAA+B;AAE/B,KAAK,UAAU,GAAG;IAChB,IAAI,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,KAAK,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,cAAc,CAAC,EAAE;QAChG,OAAO;KACR;IAED,IAAI;QACF,IAAI,MAAM,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE;YACxD,MAAM,IAAI,CAAC,qBAAqB,CAAC,cAAc,CAAC,CAAC;SAClD;aAAM;YACL,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;SACzD;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QAC1E,OAAO;KACR;AACH,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,qCAAqC,GAAG,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

278
lib/util.js generated

@@ -15,24 +15,27 @@ const path = __importStar(require("path"));
const api = __importStar(require("./api-client"));
const sharedEnv = __importStar(require("./shared-environment"));
/**
* The API URL for github.com.
* Should the current action be aborted?
*
* This method should be called at the start of all CodeQL actions and they
* should abort cleanly if this returns true without failing the action.
* This method will call `core.setFailed` if necessary.
*/
exports.GITHUB_DOTCOM_API_URL = "https://api.github.com";
/**
* Get the API URL for the GitHub instance we are connected to.
* May be for github.com or for an enterprise instance.
*/
function getInstanceAPIURL() {
return process.env["GITHUB_API_URL"] || exports.GITHUB_DOTCOM_API_URL;
function should_abort(actionName, requireInitActionHasRun) {
// Check that required aspects of the environment are present
const ref = process.env['GITHUB_REF'];
if (ref === undefined) {
core.setFailed('GITHUB_REF must be set.');
return true;
}
// If the init action is required, then check that it completed successfully.
if (requireInitActionHasRun && process.env[sharedEnv.CODEQL_ACTION_INIT_COMPLETED] === undefined) {
core.setFailed('The CodeQL ' + actionName + ' action cannot be used unless the CodeQL init action is run first. Aborting.');
return true;
}
return false;
}
exports.getInstanceAPIURL = getInstanceAPIURL;
/**
 * Are we running against a GitHub Enterprise instance, as opposed to github.com.
*/
function isEnterprise() {
return getInstanceAPIURL() !== exports.GITHUB_DOTCOM_API_URL;
}
exports.isEnterprise = isEnterprise;
exports.should_abort = should_abort;
/**
* Get an environment parameter, but throw an error if it is not set.
*/
@@ -46,43 +49,82 @@ function getRequiredEnvParam(paramName) {
}
exports.getRequiredEnvParam = getRequiredEnvParam;
/**
* Get the extra options for the codeql commands.
* Gets the set of languages in the current repository
*/
function getExtraOptionsEnvParam() {
const varName = 'CODEQL_ACTION_EXTRA_OPTIONS';
const raw = process.env[varName];
if (raw === undefined || raw.length === 0) {
return {};
async function getLanguagesInRepo() {
var _a;
// Translate between GitHub's API names for languages and ours
const codeqlLanguages = {
'C': 'cpp',
'C++': 'cpp',
'C#': 'csharp',
'Go': 'go',
'Java': 'java',
'JavaScript': 'javascript',
'TypeScript': 'javascript',
'Python': 'python',
};
let repo_nwo = (_a = process.env['GITHUB_REPOSITORY']) === null || _a === void 0 ? void 0 : _a.split("/");
if (repo_nwo) {
let owner = repo_nwo[0];
let repo = repo_nwo[1];
core.debug(`GitHub repo ${owner} ${repo}`);
const response = await api.getApiClient().request("GET /repos/:owner/:repo/languages", ({
owner,
repo
}));
core.debug("Languages API response: " + JSON.stringify(response));
// The GitHub API is going to return languages in order of popularity.
// When we pick a language to autobuild we want to pick the most popular traced language.
// Since sets in javascript maintain insertion order, using a set here and then splatting it
// into an array gives us an array of languages ordered by popularity.
let languages = new Set();
for (let lang in response.data) {
if (lang in codeqlLanguages) {
languages.add(codeqlLanguages[lang]);
}
}
return [...languages];
}
try {
return JSON.parse(raw);
}
catch (e) {
throw new Error(varName +
' environment variable is set, but does not contain valid JSON: ' +
e.message);
else {
return [];
}
}
exports.getExtraOptionsEnvParam = getExtraOptionsEnvParam;
function isLocalRun() {
return !!process.env.CODEQL_LOCAL_RUN
&& process.env.CODEQL_LOCAL_RUN !== 'false'
&& process.env.CODEQL_LOCAL_RUN !== '0';
}
exports.isLocalRun = isLocalRun;
/**
* Ensures all required environment variables are set in the context of a local run.
* Get the languages to analyse.
*
* The result is obtained from the environment parameter CODEQL_ACTION_LANGUAGES
* if that has been set, otherwise it is obtained from the action input parameter
* 'languages' if that has been set, otherwise it is deduced as all languages in the
* repo that can be analysed.
*
* If the languages are obtained from either of the second choices, the
* CODEQL_ACTION_LANGUAGES environment variable will be exported with the
* deduced list.
*/
function prepareLocalRunEnvironment() {
if (!isLocalRun()) {
return;
async function getLanguages() {
// Obtain from CODEQL_ACTION_LANGUAGES if set
const langsVar = process.env[sharedEnv.CODEQL_ACTION_LANGUAGES];
if (langsVar) {
return langsVar.split(',')
.map(x => x.trim())
.filter(x => x.length > 0);
}
core.debug('Action is running locally.');
if (!process.env.GITHUB_JOB) {
core.exportVariable('GITHUB_JOB', 'UNKNOWN-JOB');
// Obtain from action input 'languages' if set
let languages = core.getInput('languages', { required: false })
.split(',')
.map(x => x.trim())
.filter(x => x.length > 0);
core.info("Languages from configuration: " + JSON.stringify(languages));
if (languages.length === 0) {
// Obtain languages as all languages in the repo that can be analysed
languages = await getLanguagesInRepo();
core.info("Automatically detected languages: " + JSON.stringify(languages));
}
core.exportVariable(sharedEnv.CODEQL_ACTION_LANGUAGES, languages.join(','));
return languages;
}
exports.prepareLocalRunEnvironment = prepareLocalRunEnvironment;
exports.getLanguages = getLanguages;
/**
* Gets the SHA of the commit that is currently checked out.
*/
@@ -119,7 +161,7 @@ async function getWorkflowPath() {
const owner = repo_nwo[0];
const repo = repo_nwo[1];
const run_id = Number(getRequiredEnvParam('GITHUB_RUN_ID'));
const apiClient = api.getActionsApiClient();
const apiClient = api.getApiClient();
const runsResponse = await apiClient.request('GET /repos/:owner/:repo/actions/runs/:run_id', {
owner,
repo,
@@ -129,17 +171,6 @@ async function getWorkflowPath() {
const workflowResponse = await apiClient.request('GET ' + workflowUrl);
return workflowResponse.data.path;
}
/**
* Get the workflow run ID.
*/
function getWorkflowRunID() {
const workflowRunID = parseInt(getRequiredEnvParam('GITHUB_RUN_ID'), 10);
if (Number.isNaN(workflowRunID)) {
throw new Error('GITHUB_RUN_ID must define a non NaN workflow run ID');
}
return workflowRunID;
}
exports.getWorkflowRunID = getWorkflowRunID;
/**
 * Get the analysis key parameter for the current job.
*
@@ -148,15 +179,14 @@ exports.getWorkflowRunID = getWorkflowRunID;
* the github API, but after that the result will be cached.
*/
async function getAnalysisKey() {
const analysisKeyEnvVar = 'CODEQL_ACTION_ANALYSIS_KEY';
let analysisKey = process.env[analysisKeyEnvVar];
let analysisKey = process.env[sharedEnv.CODEQL_ACTION_ANALYSIS_KEY];
if (analysisKey !== undefined) {
return analysisKey;
}
const workflowPath = await getWorkflowPath();
const jobName = getRequiredEnvParam('GITHUB_JOB');
analysisKey = workflowPath + ':' + jobName;
core.exportVariable(analysisKeyEnvVar, analysisKey);
core.exportVariable(sharedEnv.CODEQL_ACTION_ANALYSIS_KEY, analysisKey);
return analysisKey;
}
exports.getAnalysisKey = getAnalysisKey;
@@ -185,11 +215,10 @@ exports.getRef = getRef;
*
* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
* @param status The status. Must be 'success', 'failure', or 'starting'
* @param startedAt The time this action started executing.
* @param cause Cause of failure (only supply if status is 'failure')
* @param exception Exception (only supply if status is 'failure')
*/
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
async function createStatusReport(actionName, status, cause, exception) {
const commitOid = process.env['GITHUB_SHA'] || '';
const ref = getRef();
const workflowRunIDStr = process.env['GITHUB_RUN_ID'];
@@ -200,22 +229,20 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
const workflowName = process.env['GITHUB_WORKFLOW'] || '';
const jobName = process.env['GITHUB_JOB'] || '';
const analysis_key = await getAnalysisKey();
let workflowStartedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
if (workflowStartedAt === undefined) {
workflowStartedAt = actionStartedAt.toISOString();
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
}
const languages = (await getLanguages()).sort().join(',');
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT] || new Date().toISOString();
core.exportVariable(sharedEnv.CODEQL_ACTION_STARTED_AT, startedAt);
let statusReport = {
workflow_run_id: workflowRunID,
workflow_name: workflowName,
job_name: jobName,
analysis_key: analysis_key,
languages: languages,
commit_oid: commitOid,
ref: ref,
action_name: actionName,
action_oid: "unknown",
started_at: workflowStartedAt,
action_started_at: actionStartedAt.toISOString(),
started_at: startedAt,
status: status
};
// Add optional parameters
@@ -234,54 +261,80 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
}
return statusReport;
}
exports.createStatusReportBase = createStatusReportBase;
/**
* Send a status report to the code_scanning/analysis/status endpoint.
*
* Optionally checks the response from the API endpoint and sets the action
* as failed if the status report failed. This is only expected to be used
* when sending a 'starting' report.
*
 * Returns whether sending the status report was successful or not.
* Returns the status code of the response to the status request.
*/
async function sendStatusReport(statusReport, ignoreFailures) {
if (isEnterprise()) {
core.debug("Not sending status report to GitHub Enterprise");
return true;
}
if (isLocalRun()) {
core.debug("Not sending status report because this is a local run");
return true;
}
async function sendStatusReport(statusReport) {
const statusReportJSON = JSON.stringify(statusReport);
core.debug('Sending status report: ' + statusReportJSON);
const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const client = api.getActionsApiClient();
const statusResponse = await client.request('PUT /repos/:owner/:repo/code-scanning/analysis/status', {
const statusResponse = await api.getApiClient().request('PUT /repos/:owner/:repo/code-scanning/analysis/status', {
owner: owner,
repo: repo,
data: statusReportJSON,
});
if (!ignoreFailures) {
// If the status report request fails with a 403 or a 404, then this is a deliberate
// message from the endpoint that the SARIF upload can be expected to fail too,
// so the action should fail to avoid wasting actions minutes.
//
// Other failure responses (or lack thereof) could be transitory and should not
// cause the action to fail.
if (statusResponse.status === 403) {
core.setFailed('The repo on which this action is running is not opted-in to CodeQL code scanning.');
return false;
}
if (statusResponse.status === 404) {
core.setFailed('Not authorized to used the CodeQL code scanning feature on this repo.');
return false;
}
return statusResponse.status;
}
/**
* Send a status report that an action is starting.
*
* If the action is `init` then this also records the start time in the environment,
 * and ensures that the analysed languages are also recorded in the environment.
*
* Returns true unless a problem occurred and the action should abort.
*/
async function reportActionStarting(action) {
const statusCode = await sendStatusReport(await createStatusReport(action, 'starting'));
// If the status report request fails with a 403 or a 404, then this is a deliberate
// message from the endpoint that the SARIF upload can be expected to fail too,
// so the action should fail to avoid wasting actions minutes.
//
// Other failure responses (or lack thereof) could be transitory and should not
// cause the action to fail.
if (statusCode === 403) {
core.setFailed('The repo on which this action is running is not opted-in to CodeQL code scanning.');
return false;
}
if (statusCode === 404) {
core.setFailed('Not authorized to used the CodeQL code scanning feature on this repo.');
return false;
}
return true;
}
exports.sendStatusReport = sendStatusReport;
exports.reportActionStarting = reportActionStarting;
/**
* Report that an action has failed.
*
* Note that the started_at date is always that of the `init` action, since
* this is likely to give a more useful duration when inspecting events.
*/
async function reportActionFailed(action, cause, exception) {
await sendStatusReport(await createStatusReport(action, 'failure', cause, exception));
}
exports.reportActionFailed = reportActionFailed;
/**
* Report that an action has succeeded.
*
* Note that the started_at date is always that of the `init` action, since
* this is likely to give a more useful duration when inspecting events.
*/
async function reportActionSucceeded(action) {
await sendStatusReport(await createStatusReport(action, 'success'));
}
exports.reportActionSucceeded = reportActionSucceeded;
/**
* Report that an action has been aborted.
*
* Note that the started_at date is always that of the `init` action, since
* this is likely to give a more useful duration when inspecting events.
*/
async function reportActionAborted(action, cause) {
await sendStatusReport(await createStatusReport(action, 'aborted', cause));
}
exports.reportActionAborted = reportActionAborted;
/**
* Get the array of all the tool names contained in the given sarif contents.
*
@@ -338,44 +391,29 @@ function getMemoryFlag() {
}
exports.getMemoryFlag = getMemoryFlag;
/**
* Get the codeql `--threads` value specified for the `threads` input.
 * If no value was specified, all available threads will be used.
*
* The value will be capped to the number of available CPUs.
* Get the codeql `--threads` value specified for the `threads` input. The value
* defaults to 1. The value will be capped to the number of available CPUs.
*
* @returns string
*/
function getThreadsFlag() {
let numThreads;
let numThreads = 1;
const numThreadsString = core.getInput("threads");
const maxThreads = os.cpus().length;
if (numThreadsString) {
numThreads = Number(numThreadsString);
if (Number.isNaN(numThreads)) {
throw new Error(`Invalid threads setting "${numThreadsString}", specified.`);
}
const maxThreads = os.cpus().length;
if (numThreads > maxThreads) {
core.info(`Clamping desired number of threads (${numThreads}) to max available (${maxThreads}).`);
numThreads = maxThreads;
}
const minThreads = -maxThreads;
if (numThreads < minThreads) {
core.info(`Clamping desired number of free threads (${numThreads}) to max available (${minThreads}).`);
numThreads = minThreads;
}
}
else {
// Default to using all threads
numThreads = maxThreads;
}
return `--threads=${numThreads}`;
}
exports.getThreadsFlag = getThreadsFlag;
/**
* Get the directory where CodeQL databases should be placed.
*/
function getCodeQLDatabasesDir(tempDir) {
return path.resolve(tempDir, 'codeql_databases');
}
exports.getCodeQLDatabasesDir = getCodeQLDatabasesDir;
//# sourceMappingURL=util.js.map

File diff suppressed because one or more lines are too long
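To make the getThreadsFlag clamping above concrete: the requested thread count is capped to the number of available CPUs, negative values (which ask to leave that many threads free, as the 'free threads' log message suggests) are clamped to minus that number, and the two sides of the diff differ mainly in the default (all available threads versus 1). A standalone sketch of the variant that defaults to all threads; threadsFlag is an invented name.

const os = require('os');

// Standalone version of the clamping logic above: the value from the 'threads'
// input is capped to [-maxThreads, maxThreads]; an empty input means all threads.
function threadsFlag(requested) {
    const maxThreads = os.cpus().length;
    if (!requested) {
        return `--threads=${maxThreads}`;
    }
    let numThreads = Number(requested);
    if (Number.isNaN(numThreads)) {
        throw new Error(`Invalid threads setting "${requested}", specified.`);
    }
    numThreads = Math.max(Math.min(numThreads, maxThreads), -maxThreads);
    return `--threads=${numThreads}`;
}

// On an 8-core runner: threadsFlag('') === '--threads=8',
// threadsFlag('16') === '--threads=8', threadsFlag('-100') === '--threads=-8'.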

52
lib/util.test.js generated

@@ -61,56 +61,4 @@ ava_1.default('getRef() throws on the empty string', t => {
process.env["GITHUB_REF"] = "";
t.throws(util.getRef);
});
ava_1.default('isLocalRun() runs correctly', t => {
const origLocalRun = process.env.CODEQL_LOCAL_RUN;
process.env.CODEQL_LOCAL_RUN = '';
t.assert(!util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = 'false';
t.assert(!util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = '0';
t.assert(!util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = 'true';
t.assert(util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = 'hucairz';
t.assert(util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = origLocalRun;
});
ava_1.default('prepareEnvironment() when a local run', t => {
const origLocalRun = process.env.CODEQL_LOCAL_RUN;
process.env.CODEQL_LOCAL_RUN = 'false';
process.env.GITHUB_JOB = 'YYY';
util.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, 'YYY');
process.env.CODEQL_LOCAL_RUN = 'true';
util.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, 'YYY');
process.env.GITHUB_JOB = '';
util.prepareLocalRunEnvironment();
// updated
t.deepEqual(process.env.GITHUB_JOB, 'UNKNOWN-JOB');
process.env.CODEQL_LOCAL_RUN = origLocalRun;
});
ava_1.default('getExtraOptionsEnvParam() succeeds on valid JSON with invalid options (for now)', t => {
const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
const options = { foo: 42 };
process.env.CODEQL_ACTION_EXTRA_OPTIONS = JSON.stringify(options);
t.deepEqual(util.getExtraOptionsEnvParam(), options);
process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
});
ava_1.default('getExtraOptionsEnvParam() succeeds on valid options', t => {
const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
const options = { database: { init: ["--debug"] } };
process.env.CODEQL_ACTION_EXTRA_OPTIONS =
JSON.stringify(options);
t.deepEqual(util.getExtraOptionsEnvParam(), options);
process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
});
ava_1.default('getExtraOptionsEnvParam() fails on invalid JSON', t => {
const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
process.env.CODEQL_ACTION_EXTRA_OPTIONS = "{{invalid-json}}";
t.throws(util.getExtraOptionsEnvParam);
process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
});
//# sourceMappingURL=util.test.js.map


@@ -1 +1 @@
{"version":3,"file":"util.test.js","sourceRoot":"","sources":["../src/util.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,uCAAyB;AAEzB,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,cAAc,EAAE,CAAC,CAAC,EAAE;IACvB,MAAM,KAAK,GAAG,EAAE,CAAC,YAAY,CAAC,SAAS,GAAG,mCAAmC,EAAE,MAAM,CAAC,CAAC;IACvF,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;IAC3C,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,+BAA+B,EAAE,QAAQ,CAAC,CAAC,CAAC;AACtE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,sDAAsD,EAAE,CAAC,CAAC,EAAE;IAE/D,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAAC,CAAC;IAE3D,MAAM,KAAK,GAAG;QACZ,EAAE,EAAE,SAAS,QAAQ,GAAG,GAAG,EAAE;QAC7B,KAAK,EAAE,WAAW;KACnB,CAAC;IAEF,KAAK,MAAM,CAAC,KAAK,EAAE,YAAY,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAEzD,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC,GAAG,KAAK,CAAC;QAEjC,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,EAAE,CAAC;QAClC,CAAC,CAAC,SAAS,CAAC,IAAI,EAAE,YAAY,CAAC,CAAC;KACjC;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,uDAAuD,EAAE,CAAC,CAAC,EAAE;IAChE,KAAK,MAAM,KAAK,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC,EAAE;QACpC,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC,GAAG,KAAK,CAAC;QACjC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;KAC9B;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,2DAA2D,EAAE,CAAC,CAAC,EAAE;IAEpE,MAAM,OAAO,GAAG,EAAE,CAAC,IAAI,EAAE,CAAC,MAAM,CAAC;IAEjC,MAAM,KAAK,GAAG;QACZ,GAAG,EAAE,aAAa;QAClB,GAAG,EAAE,aAAa;QAClB,CAAC,GAAG,OAAO,GAAG,CAAC,EAAE,CAAC,EAAE,aAAa,OAAO,EAAE;QAC1C,CAAC,GAAG,CAAC,OAAO,GAAG,CAAC,EAAE,CAAC,EAAE,aAAa,CAAC,OAAO,EAAE;KAC7C,CAAC;IAEF,KAAK,MAAM,CAAC,KAAK,EAAE,YAAY,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAEzD,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,GAAG,KAAK,CAAC;QAErC,MAAM,IAAI,GAAG,IAAI,CAAC,cAAc,EAAE,CAAC;QACnC,CAAC,CAAC,SAAS,CAAC,IAAI,EAAE,YAAY,CAAC,CAAC;KACjC;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,gEAAgE,EAAE,CAAC,CAAC,EAAE;IACzE,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,GAAG,QAAQ,CAAC;IACxC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;AAChC,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,qCAAqC,EAAE,CAAC,CAAC,EAAE;IAC9C,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC;IAC/B,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACxB,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,6BAA6B,EAAE,CAAC,CAAC,EAAE;IACtC,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC;IAElD,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,EAAE,CAAC;IAClC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;IAE7B,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,OAAO,CAAC;IACvC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;IAE7B,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,GAAG,CAAC;IACnC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;IAE7B,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,MAAM,CAAC;IACtC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;IAE5B,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,SAAS,CAAC;IACzC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;IAE5B,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,YAAY,CAAC;AAC9C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,uCAAuC,EAAE,CAAC,CAAC,EAAE;IAChD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC;IAElD,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,OAAO,CAAC;IACvC,OAAO,CAAC,GAAG,CAAC,UAAU,GAAG,KAAK,CAAC;IAE/B,IAAI,CAAC,0BAA0B,EAAE,CAAC;IAElC,YAAY;IACZ,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,KAAK,CAAC,CAAC;IAE3C,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,MAAM,CAAC;IAEtC,IAAI,CAAC,0BAA0B,EAAE,CAAC;IAElC,YAAY;IACZ,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,KAAK,CAAC,CAAC;IAE3C,OAAO,CAAC,GAAG,CAAC,UAAU,GAAG,EAAE,CAAC;IAE5B,IAAI,CAAC,0BAA0B,EAAE,CAAC;IAElC,UAAU;IACV,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,aAAa,CAAC,CAAC;IAEnD,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,YAAY,CAAC;AAC9C,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iFAAiF,EAAE,CAAC,CAAC,EAAE;IAC1F,MAAM,gBAAgB,GAAG,OAAO,CAAC
,GAAG,CAAC,2BAA2B,CAAC;IAEjE,MAAM,OAAO,GAAG,EAAC,GAAG,EAAE,EAAE,EAAC,CAAC;IAE1B,OAAO,CAAC,GAAG,CAAC,2BAA2B,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;IAElE,CAAC,CAAC,SAAS,CAAC,IAAI,CAAC,uBAAuB,EAAE,EAAO,OAAO,CAAC,CAAC;IAE1D,OAAO,CAAC,GAAG,CAAC,2BAA2B,GAAG,gBAAgB,CAAC;AAC7D,CAAC,CAAC,CAAC;AAGH,aAAI,CAAC,qDAAqD,EAAE,CAAC,CAAC,EAAE;IAC9D,MAAM,gBAAgB,GAAG,OAAO,CAAC,GAAG,CAAC,2BAA2B,CAAC;IAEjE,MAAM,OAAO,GAAG,EAAE,QAAQ,EAAE,EAAE,IAAI,EAAE,CAAC,SAAS,CAAC,EAAE,EAAE,CAAC;IACpD,OAAO,CAAC,GAAG,CAAC,2BAA2B;QACrC,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;IAE1B,CAAC,CAAC,SAAS,CAAC,IAAI,CAAC,uBAAuB,EAAE,EAAE,OAAO,CAAC,CAAC;IAErD,OAAO,CAAC,GAAG,CAAC,2BAA2B,GAAG,gBAAgB,CAAC;AAC7D,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,iDAAiD,EAAE,CAAC,CAAC,EAAE;IAC1D,MAAM,gBAAgB,GAAG,OAAO,CAAC,GAAG,CAAC,2BAA2B,CAAC;IAEjE,OAAO,CAAC,GAAG,CAAC,2BAA2B,GAAG,kBAAkB,CAAC;IAC7D,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;IAEvC,OAAO,CAAC,GAAG,CAAC,2BAA2B,GAAG,gBAAgB,CAAC;AAC7D,CAAC,CAAC,CAAC"}
{"version":3,"file":"util.test.js","sourceRoot":"","sources":["../src/util.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,uCAAyB;AAEzB,mDAA2C;AAC3C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,cAAc,EAAE,CAAC,CAAC,EAAE;IACvB,MAAM,KAAK,GAAG,EAAE,CAAC,YAAY,CAAC,SAAS,GAAG,mCAAmC,EAAE,MAAM,CAAC,CAAC;IACvF,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;IAC3C,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,+BAA+B,EAAE,QAAQ,CAAC,CAAC,CAAC;AACtE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,sDAAsD,EAAE,CAAC,CAAC,EAAE;IAE/D,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAAC,CAAC;IAE3D,MAAM,KAAK,GAAG;QACZ,EAAE,EAAE,SAAS,QAAQ,GAAG,GAAG,EAAE;QAC7B,KAAK,EAAE,WAAW;KACnB,CAAC;IAEF,KAAK,MAAM,CAAC,KAAK,EAAE,YAAY,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAEzD,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC,GAAG,KAAK,CAAC;QAEjC,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,EAAE,CAAC;QAClC,CAAC,CAAC,SAAS,CAAC,IAAI,EAAE,YAAY,CAAC,CAAC;KACjC;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,uDAAuD,EAAE,CAAC,CAAC,EAAE;IAChE,KAAK,MAAM,KAAK,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC,EAAE;QACpC,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC,GAAG,KAAK,CAAC;QACjC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;KAC9B;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,2DAA2D,EAAE,CAAC,CAAC,EAAE;IAEpE,MAAM,OAAO,GAAG,EAAE,CAAC,IAAI,EAAE,CAAC,MAAM,CAAC;IAEjC,MAAM,KAAK,GAAG;QACZ,GAAG,EAAE,aAAa;QAClB,GAAG,EAAE,aAAa;QAClB,CAAC,GAAG,OAAO,GAAG,CAAC,EAAE,CAAC,EAAE,aAAa,OAAO,EAAE;QAC1C,CAAC,GAAG,CAAC,OAAO,GAAG,CAAC,EAAE,CAAC,EAAE,aAAa,CAAC,OAAO,EAAE;KAC7C,CAAC;IAEF,KAAK,MAAM,CAAC,KAAK,EAAE,YAAY,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QAEzD,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,GAAG,KAAK,CAAC;QAErC,MAAM,IAAI,GAAG,IAAI,CAAC,cAAc,EAAE,CAAC;QACnC,CAAC,CAAC,SAAS,CAAC,IAAI,EAAE,YAAY,CAAC,CAAC;KACjC;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,gEAAgE,EAAE,CAAC,CAAC,EAAE;IACzE,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,GAAG,QAAQ,CAAC;IACxC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;AAChC,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,qCAAqC,EAAE,CAAC,CAAC,EAAE;IAC9C,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC;IAC/B,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACxB,CAAC,CAAC,CAAC"}

1
node_modules/.bin/atob generated vendored
View File

@@ -1 +0,0 @@
../atob/bin/atob.js

1
node_modules/.bin/errno generated vendored
View File

@@ -1 +0,0 @@
../errno/cli.js

1
node_modules/.bin/json5 generated vendored
View File

@@ -1 +0,0 @@
../json5/lib/cli.js

1
node_modules/.bin/miller-rabin generated vendored
View File

@@ -1 +0,0 @@
../miller-rabin/bin/miller-rabin

1
node_modules/.bin/sha.js generated vendored
View File

@@ -1 +0,0 @@
../sha.js/bin.js

1
node_modules/.bin/terser generated vendored
View File

@@ -1 +0,0 @@
../terser/bin/terser

1
node_modules/.bin/webpack generated vendored
View File

@@ -1 +0,0 @@
../webpack/bin/webpack.js

1
node_modules/.bin/webpack-cli generated vendored
View File

@@ -1 +0,0 @@
../webpack-cli/bin/cli.js

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2018 Sven Sauleau <sven@sauleau.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,167 +0,0 @@
# @webassemblyjs/ast
> AST utils for webassemblyjs
## Installation
```sh
yarn add @webassemblyjs/ast
```
## Usage
### Traverse
```js
import { traverse } from "@webassemblyjs/ast";
traverse(ast, {
Module(path) {
console.log(path.node);
}
});
```
### Instruction signatures
```js
import { signatures } from "@webassemblyjs/ast";
console.log(signatures);
```
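Each signature is a `[parameters, results]` pair built by the package's internal `sign(input, output)` helper, so individual instructions can be looked up by name. A minimal sketch, assuming the exported `signatures` object merges the per-category instruction tables:
```js
import { signatures } from "@webassemblyjs/ast";

// values are [parameters, results] pairs
console.log(signatures["i32.add"]); // [["i32", "i32"], ["i32"]]
console.log(signatures["br_if"]);   // [["u32"], []]
```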
### Path methods
Each visited path exposes the following operations, bound to that path (a usage sketch follows the list):
- `findParent: NodeLocator`
- `replaceWith: Node => void`
- `remove: () => void`
- `insertBefore: Node => void`
- `insertAfter: Node => void`
- `stop: () => void`
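A minimal usage sketch, assuming `ast` is a tree produced by one of the webassemblyjs parsers and using the `instruction` builder exported by this package:
```js
import { traverse, instruction } from "@webassemblyjs/ast";

traverse(ast, {
  Instr(path) {
    if (path.node.id === "nop") {
      // drop `nop` instructions from their parent list
      path.remove();
    } else if (path.node.id === "unreachable") {
      // swap the node in place for a `nop`
      path.replaceWith(instruction("nop"));
    }
  }
});
```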
### AST utils
The package also exports the following node builders and type guards (a combined usage sketch follows the list):
- function `module(id, fields, metadata)`
- function `moduleMetadata(sections, functionNames, localNames)`
- function `moduleNameMetadata(value)`
- function `functionNameMetadata(value, index)`
- function `localNameMetadata(value, localIndex, functionIndex)`
- function `binaryModule(id, blob)`
- function `quoteModule(id, string)`
- function `sectionMetadata(section, startOffset, size, vectorOfSize)`
- function `loopInstruction(label, resulttype, instr)`
- function `instruction(id, args, namedArgs)`
- function `objectInstruction(id, object, args, namedArgs)`
- function `ifInstruction(testLabel, test, result, consequent, alternate)`
- function `stringLiteral(value)`
- function `numberLiteralFromRaw(value, raw)`
- function `longNumberLiteral(value, raw)`
- function `floatLiteral(value, nan, inf, raw)`
- function `elem(table, offset, funcs)`
- function `indexInFuncSection(index)`
- function `valtypeLiteral(name)`
- function `typeInstruction(id, functype)`
- function `start(index)`
- function `globalType(valtype, mutability)`
- function `leadingComment(value)`
- function `blockComment(value)`
- function `data(memoryIndex, offset, init)`
- function `global(globalType, init, name)`
- function `table(elementType, limits, name, elements)`
- function `memory(limits, id)`
- function `funcImportDescr(id, signature)`
- function `moduleImport(module, name, descr)`
- function `moduleExportDescr(exportType, id)`
- function `moduleExport(name, descr)`
- function `limit(min, max)`
- function `signature(params, results)`
- function `program(body)`
- function `identifier(value, raw)`
- function `blockInstruction(label, instr, result)`
- function `callInstruction(index, instrArgs)`
- function `callIndirectInstruction(signature, intrs)`
- function `byteArray(values)`
- function `func(name, signature, body, isExternal, metadata)`
- Constant `isModule`
- Constant `isModuleMetadata`
- Constant `isModuleNameMetadata`
- Constant `isFunctionNameMetadata`
- Constant `isLocalNameMetadata`
- Constant `isBinaryModule`
- Constant `isQuoteModule`
- Constant `isSectionMetadata`
- Constant `isLoopInstruction`
- Constant `isInstruction`
- Constant `isObjectInstruction`
- Constant `isIfInstruction`
- Constant `isStringLiteral`
- Constant `isNumberLiteral`
- Constant `isLongNumberLiteral`
- Constant `isFloatLiteral`
- Constant `isElem`
- Constant `isIndexInFuncSection`
- Constant `isValtypeLiteral`
- Constant `isTypeInstruction`
- Constant `isStart`
- Constant `isGlobalType`
- Constant `isLeadingComment`
- Constant `isBlockComment`
- Constant `isData`
- Constant `isGlobal`
- Constant `isTable`
- Constant `isMemory`
- Constant `isFuncImportDescr`
- Constant `isModuleImport`
- Constant `isModuleExportDescr`
- Constant `isModuleExport`
- Constant `isLimit`
- Constant `isSignature`
- Constant `isProgram`
- Constant `isIdentifier`
- Constant `isBlockInstruction`
- Constant `isCallInstruction`
- Constant `isCallIndirectInstruction`
- Constant `isByteArray`
- Constant `isFunc`
- Constant `assertModule`
- Constant `assertModuleMetadata`
- Constant `assertModuleNameMetadata`
- Constant `assertFunctionNameMetadata`
- Constant `assertLocalNameMetadata`
- Constant `assertBinaryModule`
- Constant `assertQuoteModule`
- Constant `assertSectionMetadata`
- Constant `assertLoopInstruction`
- Constant `assertInstruction`
- Constant `assertObjectInstruction`
- Constant `assertIfInstruction`
- Constant `assertStringLiteral`
- Constant `assertNumberLiteral`
- Constant `assertLongNumberLiteral`
- Constant `assertFloatLiteral`
- Constant `assertElem`
- Constant `assertIndexInFuncSection`
- Constant `assertValtypeLiteral`
- Constant `assertTypeInstruction`
- Constant `assertStart`
- Constant `assertGlobalType`
- Constant `assertLeadingComment`
- Constant `assertBlockComment`
- Constant `assertData`
- Constant `assertGlobal`
- Constant `assertTable`
- Constant `assertMemory`
- Constant `assertFuncImportDescr`
- Constant `assertModuleImport`
- Constant `assertModuleExportDescr`
- Constant `assertModuleExport`
- Constant `assertLimit`
- Constant `assertSignature`
- Constant `assertProgram`
- Constant `assertIdentifier`
- Constant `assertBlockInstruction`
- Constant `assertCallInstruction`
- Constant `assertCallIndirectInstruction`
- Constant `assertByteArray`
- Constant `assertFunc`
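Taken together, the builders construct plain AST node objects and the `is*`/`assert*` helpers check them. A minimal sketch using a handful of the exports above (the `$answer` function returning `i32.const 42` is purely illustrative):
```js
import * as t from "@webassemblyjs/ast";

// roughly: (func $answer (result i32) (i32.const 42))
const answer = t.func(
  t.identifier("answer"),
  t.signature([], ["i32"]),
  [t.objectInstruction("const", "i32", [t.numberLiteralFromRaw(42)])]
);

const mod = t.module(null, [answer]);

t.assertFunc(answer);          // throws if `answer` is not a Func node
console.log(t.isModule(mod));  // true
console.log(t.isFunc(answer)); // true
```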

View File

@@ -1,10 +0,0 @@
// Shallow clone: copies each enumerable property of `n` into a new plain object
// (nested nodes and arrays are shared with the original).
export function cloneNode(n) {
// $FlowIgnore
var newObj = {};
for (var k in n) {
newObj[k] = n[k];
}
return newObj;
}

View File

@@ -1,663 +0,0 @@
var definitions = {};
function defineType(typeName, metadata) {
definitions[typeName] = metadata;
}
defineType("Module", {
spec: {
wasm: "https://webassembly.github.io/spec/core/binary/modules.html#binary-module",
wat: "https://webassembly.github.io/spec/core/text/modules.html#text-module"
},
doc: "A module consists of a sequence of sections (termed fields in the text format).",
unionType: ["Node"],
fields: {
id: {
maybe: true,
type: "string"
},
fields: {
array: true,
type: "Node"
},
metadata: {
optional: true,
type: "ModuleMetadata"
}
}
});
defineType("ModuleMetadata", {
unionType: ["Node"],
fields: {
sections: {
array: true,
type: "SectionMetadata"
},
functionNames: {
optional: true,
array: true,
type: "FunctionNameMetadata"
},
localNames: {
optional: true,
array: true,
type: "ModuleMetadata"
},
producers: {
optional: true,
array: true,
type: "ProducersSectionMetadata"
}
}
});
defineType("ModuleNameMetadata", {
unionType: ["Node"],
fields: {
value: {
type: "string"
}
}
});
defineType("FunctionNameMetadata", {
unionType: ["Node"],
fields: {
value: {
type: "string"
},
index: {
type: "number"
}
}
});
defineType("LocalNameMetadata", {
unionType: ["Node"],
fields: {
value: {
type: "string"
},
localIndex: {
type: "number"
},
functionIndex: {
type: "number"
}
}
});
defineType("BinaryModule", {
unionType: ["Node"],
fields: {
id: {
maybe: true,
type: "string"
},
blob: {
array: true,
type: "string"
}
}
});
defineType("QuoteModule", {
unionType: ["Node"],
fields: {
id: {
maybe: true,
type: "string"
},
string: {
array: true,
type: "string"
}
}
});
defineType("SectionMetadata", {
unionType: ["Node"],
fields: {
section: {
type: "SectionName"
},
startOffset: {
type: "number"
},
size: {
type: "NumberLiteral"
},
vectorOfSize: {
comment: "Size of the vector in the section (if any)",
type: "NumberLiteral"
}
}
});
defineType("ProducersSectionMetadata", {
unionType: ["Node"],
fields: {
producers: {
array: true,
type: "ProducerMetadata"
}
}
});
defineType("ProducerMetadata", {
unionType: ["Node"],
fields: {
language: {
type: "ProducerMetadataVersionedName",
array: true
},
processedBy: {
type: "ProducerMetadataVersionedName",
array: true
},
sdk: {
type: "ProducerMetadataVersionedName",
array: true
}
}
});
defineType("ProducerMetadataVersionedName", {
unionType: ["Node"],
fields: {
name: {
type: "string"
},
version: {
type: "string"
}
}
});
/*
Instructions
*/
defineType("LoopInstruction", {
unionType: ["Node", "Block", "Instruction"],
fields: {
id: {
constant: true,
type: "string",
value: "loop"
},
label: {
maybe: true,
type: "Identifier"
},
resulttype: {
maybe: true,
type: "Valtype"
},
instr: {
array: true,
type: "Instruction"
}
}
});
defineType("Instr", {
unionType: ["Node", "Expression", "Instruction"],
fields: {
id: {
type: "string"
},
object: {
optional: true,
type: "Valtype"
},
args: {
array: true,
type: "Expression"
},
namedArgs: {
optional: true,
type: "Object"
}
}
});
defineType("IfInstruction", {
unionType: ["Node", "Instruction"],
fields: {
id: {
constant: true,
type: "string",
value: "if"
},
testLabel: {
comment: "only for WAST",
type: "Identifier"
},
test: {
array: true,
type: "Instruction"
},
result: {
maybe: true,
type: "Valtype"
},
consequent: {
array: true,
type: "Instruction"
},
alternate: {
array: true,
type: "Instruction"
}
}
});
/*
Concrete value types
*/
defineType("StringLiteral", {
unionType: ["Node", "Expression"],
fields: {
value: {
type: "string"
}
}
});
defineType("NumberLiteral", {
unionType: ["Node", "NumericLiteral", "Expression"],
fields: {
value: {
type: "number"
},
raw: {
type: "string"
}
}
});
defineType("LongNumberLiteral", {
unionType: ["Node", "NumericLiteral", "Expression"],
fields: {
value: {
type: "LongNumber"
},
raw: {
type: "string"
}
}
});
defineType("FloatLiteral", {
unionType: ["Node", "NumericLiteral", "Expression"],
fields: {
value: {
type: "number"
},
nan: {
optional: true,
type: "boolean"
},
inf: {
optional: true,
type: "boolean"
},
raw: {
type: "string"
}
}
});
defineType("Elem", {
unionType: ["Node"],
fields: {
table: {
type: "Index"
},
offset: {
array: true,
type: "Instruction"
},
funcs: {
array: true,
type: "Index"
}
}
});
defineType("IndexInFuncSection", {
unionType: ["Node"],
fields: {
index: {
type: "Index"
}
}
});
defineType("ValtypeLiteral", {
unionType: ["Node", "Expression"],
fields: {
name: {
type: "Valtype"
}
}
});
defineType("TypeInstruction", {
unionType: ["Node", "Instruction"],
fields: {
id: {
maybe: true,
type: "Index"
},
functype: {
type: "Signature"
}
}
});
defineType("Start", {
unionType: ["Node"],
fields: {
index: {
type: "Index"
}
}
});
defineType("GlobalType", {
unionType: ["Node", "ImportDescr"],
fields: {
valtype: {
type: "Valtype"
},
mutability: {
type: "Mutability"
}
}
});
defineType("LeadingComment", {
unionType: ["Node"],
fields: {
value: {
type: "string"
}
}
});
defineType("BlockComment", {
unionType: ["Node"],
fields: {
value: {
type: "string"
}
}
});
defineType("Data", {
unionType: ["Node"],
fields: {
memoryIndex: {
type: "Memidx"
},
offset: {
type: "Instruction"
},
init: {
type: "ByteArray"
}
}
});
defineType("Global", {
unionType: ["Node"],
fields: {
globalType: {
type: "GlobalType"
},
init: {
array: true,
type: "Instruction"
},
name: {
maybe: true,
type: "Identifier"
}
}
});
defineType("Table", {
unionType: ["Node", "ImportDescr"],
fields: {
elementType: {
type: "TableElementType"
},
limits: {
assertNodeType: true,
type: "Limit"
},
name: {
maybe: true,
type: "Identifier"
},
elements: {
array: true,
optional: true,
type: "Index"
}
}
});
defineType("Memory", {
unionType: ["Node", "ImportDescr"],
fields: {
limits: {
type: "Limit"
},
id: {
maybe: true,
type: "Index"
}
}
});
defineType("FuncImportDescr", {
unionType: ["Node", "ImportDescr"],
fields: {
id: {
type: "Identifier"
},
signature: {
type: "Signature"
}
}
});
defineType("ModuleImport", {
unionType: ["Node"],
fields: {
module: {
type: "string"
},
name: {
type: "string"
},
descr: {
type: "ImportDescr"
}
}
});
defineType("ModuleExportDescr", {
unionType: ["Node"],
fields: {
exportType: {
type: "ExportDescrType"
},
id: {
type: "Index"
}
}
});
defineType("ModuleExport", {
unionType: ["Node"],
fields: {
name: {
type: "string"
},
descr: {
type: "ModuleExportDescr"
}
}
});
defineType("Limit", {
unionType: ["Node"],
fields: {
min: {
type: "number"
},
max: {
optional: true,
type: "number"
}
}
});
defineType("Signature", {
unionType: ["Node"],
fields: {
params: {
array: true,
type: "FuncParam"
},
results: {
array: true,
type: "Valtype"
}
}
});
defineType("Program", {
unionType: ["Node"],
fields: {
body: {
array: true,
type: "Node"
}
}
});
defineType("Identifier", {
unionType: ["Node", "Expression"],
fields: {
value: {
type: "string"
},
raw: {
optional: true,
type: "string"
}
}
});
defineType("BlockInstruction", {
unionType: ["Node", "Block", "Instruction"],
fields: {
id: {
constant: true,
type: "string",
value: "block"
},
label: {
maybe: true,
type: "Identifier"
},
instr: {
array: true,
type: "Instruction"
},
result: {
maybe: true,
type: "Valtype"
}
}
});
defineType("CallInstruction", {
unionType: ["Node", "Instruction"],
fields: {
id: {
constant: true,
type: "string",
value: "call"
},
index: {
type: "Index"
},
instrArgs: {
array: true,
optional: true,
type: "Expression"
},
numeric: {
type: "Index",
optional: true
}
}
});
defineType("CallIndirectInstruction", {
unionType: ["Node", "Instruction"],
fields: {
id: {
constant: true,
type: "string",
value: "call_indirect"
},
signature: {
type: "SignatureOrTypeRef"
},
intrs: {
array: true,
optional: true,
type: "Expression"
}
}
});
defineType("ByteArray", {
unionType: ["Node"],
fields: {
values: {
array: true,
type: "Byte"
}
}
});
defineType("Func", {
unionType: ["Node", "Block"],
fields: {
name: {
maybe: true,
type: "Index"
},
signature: {
type: "SignatureOrTypeRef"
},
body: {
array: true,
type: "Instruction"
},
isExternal: {
comment: "means that it has been imported from the outside js",
optional: true,
type: "boolean"
},
metadata: {
optional: true,
type: "FuncMetadata"
}
}
});
/**
* Intrinsics
*/
defineType("InternalBrUnless", {
unionType: ["Node", "Intrinsic"],
fields: {
target: {
type: "number"
}
}
});
defineType("InternalGoto", {
unionType: ["Node", "Intrinsic"],
fields: {
target: {
type: "number"
}
}
});
defineType("InternalCallExtern", {
unionType: ["Node", "Intrinsic"],
fields: {
target: {
type: "number"
}
}
}); // function bodies are terminated by an `end` instruction but have no explicit
// return instruction; the implicit return is modelled with the dedicated
// InternalEndAndReturn intrinsic defined below.
defineType("InternalEndAndReturn", {
unionType: ["Node", "Intrinsic"],
fields: {}
});
module.exports = definitions;
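// Usage sketch: the autogenerated builders and type guards elsewhere in this
// package mirror this table, and it can also be introspected directly, e.g.
//   definitions.Module.fields.fields.array -> true  (a Module holds an array of Node fields)
//   definitions.Instr.unionType            -> ["Node", "Expression", "Instruction"]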

View File

@@ -1,6 +0,0 @@
export * from "./nodes";
export { numberLiteralFromRaw, withLoc, withRaw, funcParam, indexLiteral, memIndexLiteral, instruction, objectInstruction } from "./node-helpers.js";
export { traverse } from "./traverse";
export { signatures } from "./signatures";
export * from "./utils";
export { cloneNode } from "./clone";

View File

@@ -1,84 +0,0 @@
import { parse32F, parse64F, parse32I, parse64I, parseU32, isNanLiteral, isInfLiteral } from "@webassemblyjs/wast-parser";
import { longNumberLiteral, floatLiteral, numberLiteral, instr } from "./nodes";
export function numberLiteralFromRaw(rawValue) {
var instructionType = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : "i32";
var original = rawValue; // Remove numeric separators _
if (typeof rawValue === "string") {
rawValue = rawValue.replace(/_/g, "");
}
if (typeof rawValue === "number") {
return numberLiteral(rawValue, String(original));
} else {
switch (instructionType) {
case "i32":
{
return numberLiteral(parse32I(rawValue), String(original));
}
case "u32":
{
return numberLiteral(parseU32(rawValue), String(original));
}
case "i64":
{
return longNumberLiteral(parse64I(rawValue), String(original));
}
case "f32":
{
return floatLiteral(parse32F(rawValue), isNanLiteral(rawValue), isInfLiteral(rawValue), String(original));
}
// f64
default:
{
return floatLiteral(parse64F(rawValue), isNanLiteral(rawValue), isInfLiteral(rawValue), String(original));
}
}
}
}
export function instruction(id) {
var args = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : [];
var namedArgs = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
return instr(id, undefined, args, namedArgs);
}
export function objectInstruction(id, object) {
var args = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : [];
var namedArgs = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};
return instr(id, object, args, namedArgs);
}
/**
* Decorators
*/
export function withLoc(n, end, start) {
var loc = {
start: start,
end: end
};
n.loc = loc;
return n;
}
export function withRaw(n, raw) {
n.raw = raw;
return n;
}
export function funcParam(valtype, id) {
return {
id: id,
valtype: valtype
};
}
export function indexLiteral(value) {
// $FlowIgnore
var x = numberLiteralFromRaw(value, "u32");
return x;
}
export function memIndexLiteral(value) {
// $FlowIgnore
var x = numberLiteralFromRaw(value, "u32");
return x;
}
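// Usage sketch (numeric values follow the parsers imported from @webassemblyjs/wast-parser):
//   numberLiteralFromRaw("1_000")        -> { type: "NumberLiteral", value: 1000, raw: "1_000" }
//   numberLiteralFromRaw("0xff", "u32")  -> NumberLiteral parsed with parseU32
//   withLoc(node, end, start)            -> node, with node.loc set to { start, end }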

View File

@@ -1,137 +0,0 @@
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
function findParent(_ref, cb) {
var parentPath = _ref.parentPath;
if (parentPath == null) {
throw new Error("node is root");
}
var currentPath = parentPath;
while (cb(currentPath) !== false) {
// Hit the root node, stop
// $FlowIgnore
if (currentPath.parentPath == null) {
return null;
} // $FlowIgnore
currentPath = currentPath.parentPath;
}
return currentPath.node;
}
function insertBefore(context, newNode) {
return insert(context, newNode);
}
function insertAfter(context, newNode) {
return insert(context, newNode, 1);
}
function insert(_ref2, newNode) {
var node = _ref2.node,
inList = _ref2.inList,
parentPath = _ref2.parentPath,
parentKey = _ref2.parentKey;
var indexOffset = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
if (!inList) {
throw new Error('inList' + " error: " + ("insert can only be used for nodes that are within lists" || "unknown"));
}
if (!(parentPath != null)) {
throw new Error('parentPath != null' + " error: " + ("Can not remove root node" || "unknown"));
}
// $FlowIgnore
var parentList = parentPath.node[parentKey];
var indexInList = parentList.findIndex(function (n) {
return n === node;
});
parentList.splice(indexInList + indexOffset, 0, newNode);
}
function remove(_ref3) {
var node = _ref3.node,
parentKey = _ref3.parentKey,
parentPath = _ref3.parentPath;
if (!(parentPath != null)) {
throw new Error('parentPath != null' + " error: " + ("Can not remove root node" || "unknown"));
}
// $FlowIgnore
var parentNode = parentPath.node; // $FlowIgnore
var parentProperty = parentNode[parentKey];
if (Array.isArray(parentProperty)) {
// $FlowIgnore
parentNode[parentKey] = parentProperty.filter(function (n) {
return n !== node;
});
} else {
// $FlowIgnore
delete parentNode[parentKey];
}
node._deleted = true;
}
function stop(context) {
context.shouldStop = true;
}
function replaceWith(context, newNode) {
// $FlowIgnore
var parentNode = context.parentPath.node; // $FlowIgnore
var parentProperty = parentNode[context.parentKey];
if (Array.isArray(parentProperty)) {
var indexInList = parentProperty.findIndex(function (n) {
return n === context.node;
});
parentProperty.splice(indexInList, 1, newNode);
} else {
// $FlowIgnore
parentNode[context.parentKey] = newNode;
}
context.node._deleted = true;
context.node = newNode;
} // bind the context to the first argument of node operations
function bindNodeOperations(operations, context) {
var keys = Object.keys(operations);
var boundOperations = {};
keys.forEach(function (key) {
boundOperations[key] = operations[key].bind(null, context);
});
return boundOperations;
}
function createPathOperations(context) {
// $FlowIgnore
return bindNodeOperations({
findParent: findParent,
replaceWith: replaceWith,
remove: remove,
insertBefore: insertBefore,
insertAfter: insertAfter,
stop: stop
}, context);
}
export function createPath(context) {
var path = _extends({}, context); // $FlowIgnore
Object.assign(path, createPathOperations(path)); // $FlowIgnore
return path;
}
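// Usage sketch: a traverser builds a context object and wraps it, after which the
// node operations above are available pre-bound to that context, e.g.
//   const path = createPath({ node, parentPath, parentKey: "body", inList: true });
//   path.insertAfter(newNode); // equivalent to insertAfter(path, newNode)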

View File

@@ -1,915 +0,0 @@
function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
// THIS FILE IS AUTOGENERATED
// see scripts/generateNodeUtils.js
function isTypeOf(t) {
return function (n) {
return n.type === t;
};
}
function assertTypeOf(t) {
return function (n) {
return function () {
if (!(n.type === t)) {
throw new Error('n.type === t' + " error: " + (undefined || "unknown"));
}
}();
};
}
export function module(id, fields, metadata) {
if (id !== null && id !== undefined) {
if (!(typeof id === "string")) {
throw new Error('typeof id === "string"' + " error: " + ("Argument id must be of type string, given: " + _typeof(id) || "unknown"));
}
}
if (!(_typeof(fields) === "object" && typeof fields.length !== "undefined")) {
throw new Error('typeof fields === "object" && typeof fields.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
var node = {
type: "Module",
id: id,
fields: fields
};
if (typeof metadata !== "undefined") {
node.metadata = metadata;
}
return node;
}
export function moduleMetadata(sections, functionNames, localNames, producers) {
if (!(_typeof(sections) === "object" && typeof sections.length !== "undefined")) {
throw new Error('typeof sections === "object" && typeof sections.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
if (functionNames !== null && functionNames !== undefined) {
if (!(_typeof(functionNames) === "object" && typeof functionNames.length !== "undefined")) {
throw new Error('typeof functionNames === "object" && typeof functionNames.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
}
if (localNames !== null && localNames !== undefined) {
if (!(_typeof(localNames) === "object" && typeof localNames.length !== "undefined")) {
throw new Error('typeof localNames === "object" && typeof localNames.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
}
if (producers !== null && producers !== undefined) {
if (!(_typeof(producers) === "object" && typeof producers.length !== "undefined")) {
throw new Error('typeof producers === "object" && typeof producers.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
}
var node = {
type: "ModuleMetadata",
sections: sections
};
if (typeof functionNames !== "undefined" && functionNames.length > 0) {
node.functionNames = functionNames;
}
if (typeof localNames !== "undefined" && localNames.length > 0) {
node.localNames = localNames;
}
if (typeof producers !== "undefined" && producers.length > 0) {
node.producers = producers;
}
return node;
}
export function moduleNameMetadata(value) {
if (!(typeof value === "string")) {
throw new Error('typeof value === "string"' + " error: " + ("Argument value must be of type string, given: " + _typeof(value) || "unknown"));
}
var node = {
type: "ModuleNameMetadata",
value: value
};
return node;
}
export function functionNameMetadata(value, index) {
if (!(typeof value === "string")) {
throw new Error('typeof value === "string"' + " error: " + ("Argument value must be of type string, given: " + _typeof(value) || "unknown"));
}
if (!(typeof index === "number")) {
throw new Error('typeof index === "number"' + " error: " + ("Argument index must be of type number, given: " + _typeof(index) || "unknown"));
}
var node = {
type: "FunctionNameMetadata",
value: value,
index: index
};
return node;
}
export function localNameMetadata(value, localIndex, functionIndex) {
if (!(typeof value === "string")) {
throw new Error('typeof value === "string"' + " error: " + ("Argument value must be of type string, given: " + _typeof(value) || "unknown"));
}
if (!(typeof localIndex === "number")) {
throw new Error('typeof localIndex === "number"' + " error: " + ("Argument localIndex must be of type number, given: " + _typeof(localIndex) || "unknown"));
}
if (!(typeof functionIndex === "number")) {
throw new Error('typeof functionIndex === "number"' + " error: " + ("Argument functionIndex must be of type number, given: " + _typeof(functionIndex) || "unknown"));
}
var node = {
type: "LocalNameMetadata",
value: value,
localIndex: localIndex,
functionIndex: functionIndex
};
return node;
}
export function binaryModule(id, blob) {
if (id !== null && id !== undefined) {
if (!(typeof id === "string")) {
throw new Error('typeof id === "string"' + " error: " + ("Argument id must be of type string, given: " + _typeof(id) || "unknown"));
}
}
if (!(_typeof(blob) === "object" && typeof blob.length !== "undefined")) {
throw new Error('typeof blob === "object" && typeof blob.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
var node = {
type: "BinaryModule",
id: id,
blob: blob
};
return node;
}
export function quoteModule(id, string) {
if (id !== null && id !== undefined) {
if (!(typeof id === "string")) {
throw new Error('typeof id === "string"' + " error: " + ("Argument id must be of type string, given: " + _typeof(id) || "unknown"));
}
}
if (!(_typeof(string) === "object" && typeof string.length !== "undefined")) {
throw new Error('typeof string === "object" && typeof string.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
var node = {
type: "QuoteModule",
id: id,
string: string
};
return node;
}
export function sectionMetadata(section, startOffset, size, vectorOfSize) {
if (!(typeof startOffset === "number")) {
throw new Error('typeof startOffset === "number"' + " error: " + ("Argument startOffset must be of type number, given: " + _typeof(startOffset) || "unknown"));
}
var node = {
type: "SectionMetadata",
section: section,
startOffset: startOffset,
size: size,
vectorOfSize: vectorOfSize
};
return node;
}
export function producersSectionMetadata(producers) {
if (!(_typeof(producers) === "object" && typeof producers.length !== "undefined")) {
throw new Error('typeof producers === "object" && typeof producers.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
var node = {
type: "ProducersSectionMetadata",
producers: producers
};
return node;
}
export function producerMetadata(language, processedBy, sdk) {
if (!(_typeof(language) === "object" && typeof language.length !== "undefined")) {
throw new Error('typeof language === "object" && typeof language.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
if (!(_typeof(processedBy) === "object" && typeof processedBy.length !== "undefined")) {
throw new Error('typeof processedBy === "object" && typeof processedBy.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
if (!(_typeof(sdk) === "object" && typeof sdk.length !== "undefined")) {
throw new Error('typeof sdk === "object" && typeof sdk.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
var node = {
type: "ProducerMetadata",
language: language,
processedBy: processedBy,
sdk: sdk
};
return node;
}
export function producerMetadataVersionedName(name, version) {
if (!(typeof name === "string")) {
throw new Error('typeof name === "string"' + " error: " + ("Argument name must be of type string, given: " + _typeof(name) || "unknown"));
}
if (!(typeof version === "string")) {
throw new Error('typeof version === "string"' + " error: " + ("Argument version must be of type string, given: " + _typeof(version) || "unknown"));
}
var node = {
type: "ProducerMetadataVersionedName",
name: name,
version: version
};
return node;
}
export function loopInstruction(label, resulttype, instr) {
if (!(_typeof(instr) === "object" && typeof instr.length !== "undefined")) {
throw new Error('typeof instr === "object" && typeof instr.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
var node = {
type: "LoopInstruction",
id: "loop",
label: label,
resulttype: resulttype,
instr: instr
};
return node;
}
export function instr(id, object, args, namedArgs) {
if (!(typeof id === "string")) {
throw new Error('typeof id === "string"' + " error: " + ("Argument id must be of type string, given: " + _typeof(id) || "unknown"));
}
if (!(_typeof(args) === "object" && typeof args.length !== "undefined")) {
throw new Error('typeof args === "object" && typeof args.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
var node = {
type: "Instr",
id: id,
args: args
};
if (typeof object !== "undefined") {
node.object = object;
}
if (typeof namedArgs !== "undefined" && Object.keys(namedArgs).length !== 0) {
node.namedArgs = namedArgs;
}
return node;
}
export function ifInstruction(testLabel, test, result, consequent, alternate) {
if (!(_typeof(test) === "object" && typeof test.length !== "undefined")) {
throw new Error('typeof test === "object" && typeof test.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
if (!(_typeof(consequent) === "object" && typeof consequent.length !== "undefined")) {
throw new Error('typeof consequent === "object" && typeof consequent.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
if (!(_typeof(alternate) === "object" && typeof alternate.length !== "undefined")) {
throw new Error('typeof alternate === "object" && typeof alternate.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
var node = {
type: "IfInstruction",
id: "if",
testLabel: testLabel,
test: test,
result: result,
consequent: consequent,
alternate: alternate
};
return node;
}
export function stringLiteral(value) {
if (!(typeof value === "string")) {
throw new Error('typeof value === "string"' + " error: " + ("Argument value must be of type string, given: " + _typeof(value) || "unknown"));
}
var node = {
type: "StringLiteral",
value: value
};
return node;
}
export function numberLiteral(value, raw) {
if (!(typeof value === "number")) {
throw new Error('typeof value === "number"' + " error: " + ("Argument value must be of type number, given: " + _typeof(value) || "unknown"));
}
if (!(typeof raw === "string")) {
throw new Error('typeof raw === "string"' + " error: " + ("Argument raw must be of type string, given: " + _typeof(raw) || "unknown"));
}
var node = {
type: "NumberLiteral",
value: value,
raw: raw
};
return node;
}
export function longNumberLiteral(value, raw) {
if (!(typeof raw === "string")) {
throw new Error('typeof raw === "string"' + " error: " + ("Argument raw must be of type string, given: " + _typeof(raw) || "unknown"));
}
var node = {
type: "LongNumberLiteral",
value: value,
raw: raw
};
return node;
}
export function floatLiteral(value, nan, inf, raw) {
if (!(typeof value === "number")) {
throw new Error('typeof value === "number"' + " error: " + ("Argument value must be of type number, given: " + _typeof(value) || "unknown"));
}
if (nan !== null && nan !== undefined) {
if (!(typeof nan === "boolean")) {
throw new Error('typeof nan === "boolean"' + " error: " + ("Argument nan must be of type boolean, given: " + _typeof(nan) || "unknown"));
}
}
if (inf !== null && inf !== undefined) {
if (!(typeof inf === "boolean")) {
throw new Error('typeof inf === "boolean"' + " error: " + ("Argument inf must be of type boolean, given: " + _typeof(inf) || "unknown"));
}
}
if (!(typeof raw === "string")) {
throw new Error('typeof raw === "string"' + " error: " + ("Argument raw must be of type string, given: " + _typeof(raw) || "unknown"));
}
var node = {
type: "FloatLiteral",
value: value,
raw: raw
};
if (nan === true) {
node.nan = true;
}
if (inf === true) {
node.inf = true;
}
return node;
}
export function elem(table, offset, funcs) {
if (!(_typeof(offset) === "object" && typeof offset.length !== "undefined")) {
throw new Error('typeof offset === "object" && typeof offset.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
if (!(_typeof(funcs) === "object" && typeof funcs.length !== "undefined")) {
throw new Error('typeof funcs === "object" && typeof funcs.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
var node = {
type: "Elem",
table: table,
offset: offset,
funcs: funcs
};
return node;
}
export function indexInFuncSection(index) {
var node = {
type: "IndexInFuncSection",
index: index
};
return node;
}
export function valtypeLiteral(name) {
var node = {
type: "ValtypeLiteral",
name: name
};
return node;
}
export function typeInstruction(id, functype) {
var node = {
type: "TypeInstruction",
id: id,
functype: functype
};
return node;
}
export function start(index) {
var node = {
type: "Start",
index: index
};
return node;
}
export function globalType(valtype, mutability) {
var node = {
type: "GlobalType",
valtype: valtype,
mutability: mutability
};
return node;
}
export function leadingComment(value) {
if (!(typeof value === "string")) {
throw new Error('typeof value === "string"' + " error: " + ("Argument value must be of type string, given: " + _typeof(value) || "unknown"));
}
var node = {
type: "LeadingComment",
value: value
};
return node;
}
export function blockComment(value) {
if (!(typeof value === "string")) {
throw new Error('typeof value === "string"' + " error: " + ("Argument value must be of type string, given: " + _typeof(value) || "unknown"));
}
var node = {
type: "BlockComment",
value: value
};
return node;
}
export function data(memoryIndex, offset, init) {
var node = {
type: "Data",
memoryIndex: memoryIndex,
offset: offset,
init: init
};
return node;
}
export function global(globalType, init, name) {
if (!(_typeof(init) === "object" && typeof init.length !== "undefined")) {
throw new Error('typeof init === "object" && typeof init.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
var node = {
type: "Global",
globalType: globalType,
init: init,
name: name
};
return node;
}
export function table(elementType, limits, name, elements) {
if (!(limits.type === "Limit")) {
throw new Error('limits.type === "Limit"' + " error: " + ("Argument limits must be of type Limit, given: " + limits.type || "unknown"));
}
if (elements !== null && elements !== undefined) {
if (!(_typeof(elements) === "object" && typeof elements.length !== "undefined")) {
throw new Error('typeof elements === "object" && typeof elements.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
}
var node = {
type: "Table",
elementType: elementType,
limits: limits,
name: name
};
if (typeof elements !== "undefined" && elements.length > 0) {
node.elements = elements;
}
return node;
}
export function memory(limits, id) {
var node = {
type: "Memory",
limits: limits,
id: id
};
return node;
}
export function funcImportDescr(id, signature) {
var node = {
type: "FuncImportDescr",
id: id,
signature: signature
};
return node;
}
export function moduleImport(module, name, descr) {
if (!(typeof module === "string")) {
throw new Error('typeof module === "string"' + " error: " + ("Argument module must be of type string, given: " + _typeof(module) || "unknown"));
}
if (!(typeof name === "string")) {
throw new Error('typeof name === "string"' + " error: " + ("Argument name must be of type string, given: " + _typeof(name) || "unknown"));
}
var node = {
type: "ModuleImport",
module: module,
name: name,
descr: descr
};
return node;
}
export function moduleExportDescr(exportType, id) {
var node = {
type: "ModuleExportDescr",
exportType: exportType,
id: id
};
return node;
}
export function moduleExport(name, descr) {
if (!(typeof name === "string")) {
throw new Error('typeof name === "string"' + " error: " + ("Argument name must be of type string, given: " + _typeof(name) || "unknown"));
}
var node = {
type: "ModuleExport",
name: name,
descr: descr
};
return node;
}
export function limit(min, max) {
if (!(typeof min === "number")) {
throw new Error('typeof min === "number"' + " error: " + ("Argument min must be of type number, given: " + _typeof(min) || "unknown"));
}
if (max !== null && max !== undefined) {
if (!(typeof max === "number")) {
throw new Error('typeof max === "number"' + " error: " + ("Argument max must be of type number, given: " + _typeof(max) || "unknown"));
}
}
var node = {
type: "Limit",
min: min
};
if (typeof max !== "undefined") {
node.max = max;
}
return node;
}
export function signature(params, results) {
if (!(_typeof(params) === "object" && typeof params.length !== "undefined")) {
throw new Error('typeof params === "object" && typeof params.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
if (!(_typeof(results) === "object" && typeof results.length !== "undefined")) {
throw new Error('typeof results === "object" && typeof results.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
var node = {
type: "Signature",
params: params,
results: results
};
return node;
}
export function program(body) {
if (!(_typeof(body) === "object" && typeof body.length !== "undefined")) {
throw new Error('typeof body === "object" && typeof body.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
var node = {
type: "Program",
body: body
};
return node;
}
export function identifier(value, raw) {
if (!(typeof value === "string")) {
throw new Error('typeof value === "string"' + " error: " + ("Argument value must be of type string, given: " + _typeof(value) || "unknown"));
}
if (raw !== null && raw !== undefined) {
if (!(typeof raw === "string")) {
throw new Error('typeof raw === "string"' + " error: " + ("Argument raw must be of type string, given: " + _typeof(raw) || "unknown"));
}
}
var node = {
type: "Identifier",
value: value
};
if (typeof raw !== "undefined") {
node.raw = raw;
}
return node;
}
export function blockInstruction(label, instr, result) {
if (!(_typeof(instr) === "object" && typeof instr.length !== "undefined")) {
throw new Error('typeof instr === "object" && typeof instr.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
var node = {
type: "BlockInstruction",
id: "block",
label: label,
instr: instr,
result: result
};
return node;
}
export function callInstruction(index, instrArgs, numeric) {
if (instrArgs !== null && instrArgs !== undefined) {
if (!(_typeof(instrArgs) === "object" && typeof instrArgs.length !== "undefined")) {
throw new Error('typeof instrArgs === "object" && typeof instrArgs.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
}
var node = {
type: "CallInstruction",
id: "call",
index: index
};
if (typeof instrArgs !== "undefined" && instrArgs.length > 0) {
node.instrArgs = instrArgs;
}
if (typeof numeric !== "undefined") {
node.numeric = numeric;
}
return node;
}
export function callIndirectInstruction(signature, intrs) {
if (intrs !== null && intrs !== undefined) {
if (!(_typeof(intrs) === "object" && typeof intrs.length !== "undefined")) {
throw new Error('typeof intrs === "object" && typeof intrs.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
}
var node = {
type: "CallIndirectInstruction",
id: "call_indirect",
signature: signature
};
if (typeof intrs !== "undefined" && intrs.length > 0) {
node.intrs = intrs;
}
return node;
}
export function byteArray(values) {
if (!(_typeof(values) === "object" && typeof values.length !== "undefined")) {
throw new Error('typeof values === "object" && typeof values.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
var node = {
type: "ByteArray",
values: values
};
return node;
}
export function func(name, signature, body, isExternal, metadata) {
if (!(_typeof(body) === "object" && typeof body.length !== "undefined")) {
throw new Error('typeof body === "object" && typeof body.length !== "undefined"' + " error: " + (undefined || "unknown"));
}
if (isExternal !== null && isExternal !== undefined) {
if (!(typeof isExternal === "boolean")) {
throw new Error('typeof isExternal === "boolean"' + " error: " + ("Argument isExternal must be of type boolean, given: " + _typeof(isExternal) || "unknown"));
}
}
var node = {
type: "Func",
name: name,
signature: signature,
body: body
};
if (isExternal === true) {
node.isExternal = true;
}
if (typeof metadata !== "undefined") {
node.metadata = metadata;
}
return node;
}
export function internalBrUnless(target) {
if (!(typeof target === "number")) {
throw new Error('typeof target === "number"' + " error: " + ("Argument target must be of type number, given: " + _typeof(target) || "unknown"));
}
var node = {
type: "InternalBrUnless",
target: target
};
return node;
}
export function internalGoto(target) {
if (!(typeof target === "number")) {
throw new Error('typeof target === "number"' + " error: " + ("Argument target must be of type number, given: " + _typeof(target) || "unknown"));
}
var node = {
type: "InternalGoto",
target: target
};
return node;
}
export function internalCallExtern(target) {
if (!(typeof target === "number")) {
throw new Error('typeof target === "number"' + " error: " + ("Argument target must be of type number, given: " + _typeof(target) || "unknown"));
}
var node = {
type: "InternalCallExtern",
target: target
};
return node;
}
export function internalEndAndReturn() {
var node = {
type: "InternalEndAndReturn"
};
return node;
}
export var isModule = isTypeOf("Module");
export var isModuleMetadata = isTypeOf("ModuleMetadata");
export var isModuleNameMetadata = isTypeOf("ModuleNameMetadata");
export var isFunctionNameMetadata = isTypeOf("FunctionNameMetadata");
export var isLocalNameMetadata = isTypeOf("LocalNameMetadata");
export var isBinaryModule = isTypeOf("BinaryModule");
export var isQuoteModule = isTypeOf("QuoteModule");
export var isSectionMetadata = isTypeOf("SectionMetadata");
export var isProducersSectionMetadata = isTypeOf("ProducersSectionMetadata");
export var isProducerMetadata = isTypeOf("ProducerMetadata");
export var isProducerMetadataVersionedName = isTypeOf("ProducerMetadataVersionedName");
export var isLoopInstruction = isTypeOf("LoopInstruction");
export var isInstr = isTypeOf("Instr");
export var isIfInstruction = isTypeOf("IfInstruction");
export var isStringLiteral = isTypeOf("StringLiteral");
export var isNumberLiteral = isTypeOf("NumberLiteral");
export var isLongNumberLiteral = isTypeOf("LongNumberLiteral");
export var isFloatLiteral = isTypeOf("FloatLiteral");
export var isElem = isTypeOf("Elem");
export var isIndexInFuncSection = isTypeOf("IndexInFuncSection");
export var isValtypeLiteral = isTypeOf("ValtypeLiteral");
export var isTypeInstruction = isTypeOf("TypeInstruction");
export var isStart = isTypeOf("Start");
export var isGlobalType = isTypeOf("GlobalType");
export var isLeadingComment = isTypeOf("LeadingComment");
export var isBlockComment = isTypeOf("BlockComment");
export var isData = isTypeOf("Data");
export var isGlobal = isTypeOf("Global");
export var isTable = isTypeOf("Table");
export var isMemory = isTypeOf("Memory");
export var isFuncImportDescr = isTypeOf("FuncImportDescr");
export var isModuleImport = isTypeOf("ModuleImport");
export var isModuleExportDescr = isTypeOf("ModuleExportDescr");
export var isModuleExport = isTypeOf("ModuleExport");
export var isLimit = isTypeOf("Limit");
export var isSignature = isTypeOf("Signature");
export var isProgram = isTypeOf("Program");
export var isIdentifier = isTypeOf("Identifier");
export var isBlockInstruction = isTypeOf("BlockInstruction");
export var isCallInstruction = isTypeOf("CallInstruction");
export var isCallIndirectInstruction = isTypeOf("CallIndirectInstruction");
export var isByteArray = isTypeOf("ByteArray");
export var isFunc = isTypeOf("Func");
export var isInternalBrUnless = isTypeOf("InternalBrUnless");
export var isInternalGoto = isTypeOf("InternalGoto");
export var isInternalCallExtern = isTypeOf("InternalCallExtern");
export var isInternalEndAndReturn = isTypeOf("InternalEndAndReturn");
export var isNode = function isNode(node) {
return isModule(node) || isModuleMetadata(node) || isModuleNameMetadata(node) || isFunctionNameMetadata(node) || isLocalNameMetadata(node) || isBinaryModule(node) || isQuoteModule(node) || isSectionMetadata(node) || isProducersSectionMetadata(node) || isProducerMetadata(node) || isProducerMetadataVersionedName(node) || isLoopInstruction(node) || isInstr(node) || isIfInstruction(node) || isStringLiteral(node) || isNumberLiteral(node) || isLongNumberLiteral(node) || isFloatLiteral(node) || isElem(node) || isIndexInFuncSection(node) || isValtypeLiteral(node) || isTypeInstruction(node) || isStart(node) || isGlobalType(node) || isLeadingComment(node) || isBlockComment(node) || isData(node) || isGlobal(node) || isTable(node) || isMemory(node) || isFuncImportDescr(node) || isModuleImport(node) || isModuleExportDescr(node) || isModuleExport(node) || isLimit(node) || isSignature(node) || isProgram(node) || isIdentifier(node) || isBlockInstruction(node) || isCallInstruction(node) || isCallIndirectInstruction(node) || isByteArray(node) || isFunc(node) || isInternalBrUnless(node) || isInternalGoto(node) || isInternalCallExtern(node) || isInternalEndAndReturn(node);
};
export var isBlock = function isBlock(node) {
return isLoopInstruction(node) || isBlockInstruction(node) || isFunc(node);
};
export var isInstruction = function isInstruction(node) {
return isLoopInstruction(node) || isInstr(node) || isIfInstruction(node) || isTypeInstruction(node) || isBlockInstruction(node) || isCallInstruction(node) || isCallIndirectInstruction(node);
};
export var isExpression = function isExpression(node) {
return isInstr(node) || isStringLiteral(node) || isNumberLiteral(node) || isLongNumberLiteral(node) || isFloatLiteral(node) || isValtypeLiteral(node) || isIdentifier(node);
};
export var isNumericLiteral = function isNumericLiteral(node) {
return isNumberLiteral(node) || isLongNumberLiteral(node) || isFloatLiteral(node);
};
export var isImportDescr = function isImportDescr(node) {
return isGlobalType(node) || isTable(node) || isMemory(node) || isFuncImportDescr(node);
};
export var isIntrinsic = function isIntrinsic(node) {
return isInternalBrUnless(node) || isInternalGoto(node) || isInternalCallExtern(node) || isInternalEndAndReturn(node);
};
export var assertModule = assertTypeOf("Module");
export var assertModuleMetadata = assertTypeOf("ModuleMetadata");
export var assertModuleNameMetadata = assertTypeOf("ModuleNameMetadata");
export var assertFunctionNameMetadata = assertTypeOf("FunctionNameMetadata");
export var assertLocalNameMetadata = assertTypeOf("LocalNameMetadata");
export var assertBinaryModule = assertTypeOf("BinaryModule");
export var assertQuoteModule = assertTypeOf("QuoteModule");
export var assertSectionMetadata = assertTypeOf("SectionMetadata");
export var assertProducersSectionMetadata = assertTypeOf("ProducersSectionMetadata");
export var assertProducerMetadata = assertTypeOf("ProducerMetadata");
export var assertProducerMetadataVersionedName = assertTypeOf("ProducerMetadataVersionedName");
export var assertLoopInstruction = assertTypeOf("LoopInstruction");
export var assertInstr = assertTypeOf("Instr");
export var assertIfInstruction = assertTypeOf("IfInstruction");
export var assertStringLiteral = assertTypeOf("StringLiteral");
export var assertNumberLiteral = assertTypeOf("NumberLiteral");
export var assertLongNumberLiteral = assertTypeOf("LongNumberLiteral");
export var assertFloatLiteral = assertTypeOf("FloatLiteral");
export var assertElem = assertTypeOf("Elem");
export var assertIndexInFuncSection = assertTypeOf("IndexInFuncSection");
export var assertValtypeLiteral = assertTypeOf("ValtypeLiteral");
export var assertTypeInstruction = assertTypeOf("TypeInstruction");
export var assertStart = assertTypeOf("Start");
export var assertGlobalType = assertTypeOf("GlobalType");
export var assertLeadingComment = assertTypeOf("LeadingComment");
export var assertBlockComment = assertTypeOf("BlockComment");
export var assertData = assertTypeOf("Data");
export var assertGlobal = assertTypeOf("Global");
export var assertTable = assertTypeOf("Table");
export var assertMemory = assertTypeOf("Memory");
export var assertFuncImportDescr = assertTypeOf("FuncImportDescr");
export var assertModuleImport = assertTypeOf("ModuleImport");
export var assertModuleExportDescr = assertTypeOf("ModuleExportDescr");
export var assertModuleExport = assertTypeOf("ModuleExport");
export var assertLimit = assertTypeOf("Limit");
export var assertSignature = assertTypeOf("Signature");
export var assertProgram = assertTypeOf("Program");
export var assertIdentifier = assertTypeOf("Identifier");
export var assertBlockInstruction = assertTypeOf("BlockInstruction");
export var assertCallInstruction = assertTypeOf("CallInstruction");
export var assertCallIndirectInstruction = assertTypeOf("CallIndirectInstruction");
export var assertByteArray = assertTypeOf("ByteArray");
export var assertFunc = assertTypeOf("Func");
export var assertInternalBrUnless = assertTypeOf("InternalBrUnless");
export var assertInternalGoto = assertTypeOf("InternalGoto");
export var assertInternalCallExtern = assertTypeOf("InternalCallExtern");
export var assertInternalEndAndReturn = assertTypeOf("InternalEndAndReturn");
export var unionTypesMap = {
Module: ["Node"],
ModuleMetadata: ["Node"],
ModuleNameMetadata: ["Node"],
FunctionNameMetadata: ["Node"],
LocalNameMetadata: ["Node"],
BinaryModule: ["Node"],
QuoteModule: ["Node"],
SectionMetadata: ["Node"],
ProducersSectionMetadata: ["Node"],
ProducerMetadata: ["Node"],
ProducerMetadataVersionedName: ["Node"],
LoopInstruction: ["Node", "Block", "Instruction"],
Instr: ["Node", "Expression", "Instruction"],
IfInstruction: ["Node", "Instruction"],
StringLiteral: ["Node", "Expression"],
NumberLiteral: ["Node", "NumericLiteral", "Expression"],
LongNumberLiteral: ["Node", "NumericLiteral", "Expression"],
FloatLiteral: ["Node", "NumericLiteral", "Expression"],
Elem: ["Node"],
IndexInFuncSection: ["Node"],
ValtypeLiteral: ["Node", "Expression"],
TypeInstruction: ["Node", "Instruction"],
Start: ["Node"],
GlobalType: ["Node", "ImportDescr"],
LeadingComment: ["Node"],
BlockComment: ["Node"],
Data: ["Node"],
Global: ["Node"],
Table: ["Node", "ImportDescr"],
Memory: ["Node", "ImportDescr"],
FuncImportDescr: ["Node", "ImportDescr"],
ModuleImport: ["Node"],
ModuleExportDescr: ["Node"],
ModuleExport: ["Node"],
Limit: ["Node"],
Signature: ["Node"],
Program: ["Node"],
Identifier: ["Node", "Expression"],
BlockInstruction: ["Node", "Block", "Instruction"],
CallInstruction: ["Node", "Instruction"],
CallIndirectInstruction: ["Node", "Instruction"],
ByteArray: ["Node"],
Func: ["Node", "Block"],
InternalBrUnless: ["Node", "Intrinsic"],
InternalGoto: ["Node", "Intrinsic"],
InternalCallExtern: ["Node", "Intrinsic"],
InternalEndAndReturn: ["Node", "Intrinsic"]
};
export var nodeAndUnionTypes = ["Module", "ModuleMetadata", "ModuleNameMetadata", "FunctionNameMetadata", "LocalNameMetadata", "BinaryModule", "QuoteModule", "SectionMetadata", "ProducersSectionMetadata", "ProducerMetadata", "ProducerMetadataVersionedName", "LoopInstruction", "Instr", "IfInstruction", "StringLiteral", "NumberLiteral", "LongNumberLiteral", "FloatLiteral", "Elem", "IndexInFuncSection", "ValtypeLiteral", "TypeInstruction", "Start", "GlobalType", "LeadingComment", "BlockComment", "Data", "Global", "Table", "Memory", "FuncImportDescr", "ModuleImport", "ModuleExportDescr", "ModuleExport", "Limit", "Signature", "Program", "Identifier", "BlockInstruction", "CallInstruction", "CallIndirectInstruction", "ByteArray", "Func", "InternalBrUnless", "InternalGoto", "InternalCallExtern", "InternalEndAndReturn", "Node", "Block", "Instruction", "Expression", "NumericLiteral", "ImportDescr", "Intrinsic"];
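// Usage sketch: the generated predicates compose into the union checks above, e.g.
//   isInstruction(instr("nop", undefined, []))  -> true  (Instr belongs to the Instruction union)
//   unionTypesMap.Instr                         -> ["Node", "Expression", "Instruction"]
//   assertIdentifier(stringLiteral("x"))        // throws ("n.type === t error: unknown")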

View File

@@ -1,199 +0,0 @@
function sign(input, output) {
return [input, output];
}
var u32 = "u32";
var i32 = "i32";
var i64 = "i64";
var f32 = "f32";
var f64 = "f64";
var vector = function vector(t) {
var vecType = [t]; // $FlowIgnore
vecType.vector = true;
return vecType;
};
var controlInstructions = {
unreachable: sign([], []),
nop: sign([], []),
// block ?
// loop ?
// if ?
// if else ?
br: sign([u32], []),
br_if: sign([u32], []),
br_table: sign(vector(u32), []),
return: sign([], []),
call: sign([u32], []),
call_indirect: sign([u32], [])
};
var parametricInstructions = {
drop: sign([], []),
select: sign([], [])
};
var variableInstructions = {
get_local: sign([u32], []),
set_local: sign([u32], []),
tee_local: sign([u32], []),
get_global: sign([u32], []),
set_global: sign([u32], [])
};
var memoryInstructions = {
"i32.load": sign([u32, u32], [i32]),
"i64.load": sign([u32, u32], []),
"f32.load": sign([u32, u32], []),
"f64.load": sign([u32, u32], []),
"i32.load8_s": sign([u32, u32], [i32]),
"i32.load8_u": sign([u32, u32], [i32]),
"i32.load16_s": sign([u32, u32], [i32]),
"i32.load16_u": sign([u32, u32], [i32]),
"i64.load8_s": sign([u32, u32], [i64]),
"i64.load8_u": sign([u32, u32], [i64]),
"i64.load16_s": sign([u32, u32], [i64]),
"i64.load16_u": sign([u32, u32], [i64]),
"i64.load32_s": sign([u32, u32], [i64]),
"i64.load32_u": sign([u32, u32], [i64]),
"i32.store": sign([u32, u32], []),
"i64.store": sign([u32, u32], []),
"f32.store": sign([u32, u32], []),
"f64.store": sign([u32, u32], []),
"i32.store8": sign([u32, u32], []),
"i32.store16": sign([u32, u32], []),
"i64.store8": sign([u32, u32], []),
"i64.store16": sign([u32, u32], []),
"i64.store32": sign([u32, u32], []),
current_memory: sign([], []),
grow_memory: sign([], [])
};
var numericInstructions = {
"i32.const": sign([i32], [i32]),
"i64.const": sign([i64], [i64]),
"f32.const": sign([f32], [f32]),
"f64.const": sign([f64], [f64]),
"i32.eqz": sign([i32], [i32]),
"i32.eq": sign([i32, i32], [i32]),
"i32.ne": sign([i32, i32], [i32]),
"i32.lt_s": sign([i32, i32], [i32]),
"i32.lt_u": sign([i32, i32], [i32]),
"i32.gt_s": sign([i32, i32], [i32]),
"i32.gt_u": sign([i32, i32], [i32]),
"i32.le_s": sign([i32, i32], [i32]),
"i32.le_u": sign([i32, i32], [i32]),
"i32.ge_s": sign([i32, i32], [i32]),
"i32.ge_u": sign([i32, i32], [i32]),
"i64.eqz": sign([i64], [i64]),
"i64.eq": sign([i64, i64], [i32]),
"i64.ne": sign([i64, i64], [i32]),
"i64.lt_s": sign([i64, i64], [i32]),
"i64.lt_u": sign([i64, i64], [i32]),
"i64.gt_s": sign([i64, i64], [i32]),
"i64.gt_u": sign([i64, i64], [i32]),
"i64.le_s": sign([i64, i64], [i32]),
"i64.le_u": sign([i64, i64], [i32]),
"i64.ge_s": sign([i64, i64], [i32]),
"i64.ge_u": sign([i64, i64], [i32]),
"f32.eq": sign([f32, f32], [i32]),
"f32.ne": sign([f32, f32], [i32]),
"f32.lt": sign([f32, f32], [i32]),
"f32.gt": sign([f32, f32], [i32]),
"f32.le": sign([f32, f32], [i32]),
"f32.ge": sign([f32, f32], [i32]),
"f64.eq": sign([f64, f64], [i32]),
"f64.ne": sign([f64, f64], [i32]),
"f64.lt": sign([f64, f64], [i32]),
"f64.gt": sign([f64, f64], [i32]),
"f64.le": sign([f64, f64], [i32]),
"f64.ge": sign([f64, f64], [i32]),
"i32.clz": sign([i32], [i32]),
"i32.ctz": sign([i32], [i32]),
"i32.popcnt": sign([i32], [i32]),
"i32.add": sign([i32, i32], [i32]),
"i32.sub": sign([i32, i32], [i32]),
"i32.mul": sign([i32, i32], [i32]),
"i32.div_s": sign([i32, i32], [i32]),
"i32.div_u": sign([i32, i32], [i32]),
"i32.rem_s": sign([i32, i32], [i32]),
"i32.rem_u": sign([i32, i32], [i32]),
"i32.and": sign([i32, i32], [i32]),
"i32.or": sign([i32, i32], [i32]),
"i32.xor": sign([i32, i32], [i32]),
"i32.shl": sign([i32, i32], [i32]),
"i32.shr_s": sign([i32, i32], [i32]),
"i32.shr_u": sign([i32, i32], [i32]),
"i32.rotl": sign([i32, i32], [i32]),
"i32.rotr": sign([i32, i32], [i32]),
"i64.clz": sign([i64], [i64]),
"i64.ctz": sign([i64], [i64]),
"i64.popcnt": sign([i64], [i64]),
"i64.add": sign([i64, i64], [i64]),
"i64.sub": sign([i64, i64], [i64]),
"i64.mul": sign([i64, i64], [i64]),
"i64.div_s": sign([i64, i64], [i64]),
"i64.div_u": sign([i64, i64], [i64]),
"i64.rem_s": sign([i64, i64], [i64]),
"i64.rem_u": sign([i64, i64], [i64]),
"i64.and": sign([i64, i64], [i64]),
"i64.or": sign([i64, i64], [i64]),
"i64.xor": sign([i64, i64], [i64]),
"i64.shl": sign([i64, i64], [i64]),
"i64.shr_s": sign([i64, i64], [i64]),
"i64.shr_u": sign([i64, i64], [i64]),
"i64.rotl": sign([i64, i64], [i64]),
"i64.rotr": sign([i64, i64], [i64]),
"f32.abs": sign([f32], [f32]),
"f32.neg": sign([f32], [f32]),
"f32.ceil": sign([f32], [f32]),
"f32.floor": sign([f32], [f32]),
"f32.trunc": sign([f32], [f32]),
"f32.nearest": sign([f32], [f32]),
"f32.sqrt": sign([f32], [f32]),
"f32.add": sign([f32, f32], [f32]),
"f32.sub": sign([f32, f32], [f32]),
"f32.mul": sign([f32, f32], [f32]),
"f32.div": sign([f32, f32], [f32]),
"f32.min": sign([f32, f32], [f32]),
"f32.max": sign([f32, f32], [f32]),
"f32.copysign": sign([f32, f32], [f32]),
"f64.abs": sign([f64], [f64]),
"f64.neg": sign([f64], [f64]),
"f64.ceil": sign([f64], [f64]),
"f64.floor": sign([f64], [f64]),
"f64.trunc": sign([f64], [f64]),
"f64.nearest": sign([f64], [f64]),
"f64.sqrt": sign([f64], [f64]),
"f64.add": sign([f64, f64], [f64]),
"f64.sub": sign([f64, f64], [f64]),
"f64.mul": sign([f64, f64], [f64]),
"f64.div": sign([f64, f64], [f64]),
"f64.min": sign([f64, f64], [f64]),
"f64.max": sign([f64, f64], [f64]),
"f64.copysign": sign([f64, f64], [f64]),
"i32.wrap/i64": sign([i64], [i32]),
"i32.trunc_s/f32": sign([f32], [i32]),
"i32.trunc_u/f32": sign([f32], [i32]),
"i32.trunc_s/f64": sign([f32], [i32]),
"i32.trunc_u/f64": sign([f64], [i32]),
"i64.extend_s/i32": sign([i32], [i64]),
"i64.extend_u/i32": sign([i32], [i64]),
"i64.trunc_s/f32": sign([f32], [i64]),
"i64.trunc_u/f32": sign([f32], [i64]),
"i64.trunc_s/f64": sign([f64], [i64]),
"i64.trunc_u/f64": sign([f64], [i64]),
"f32.convert_s/i32": sign([i32], [f32]),
"f32.convert_u/i32": sign([i32], [f32]),
"f32.convert_s/i64": sign([i64], [f32]),
"f32.convert_u/i64": sign([i64], [f32]),
"f32.demote/f64": sign([f64], [f32]),
"f64.convert_s/i32": sign([i32], [f64]),
"f64.convert_u/i32": sign([i32], [f64]),
"f64.convert_s/i64": sign([i64], [f64]),
"f64.convert_u/i64": sign([i64], [f64]),
"f64.promote/f32": sign([f32], [f64]),
"i32.reinterpret/f32": sign([f32], [i32]),
"i64.reinterpret/f64": sign([f64], [i64]),
"f32.reinterpret/i32": sign([i32], [f32]),
"f64.reinterpret/i64": sign([i64], [f64])
};
export var signatures = Object.assign({}, controlInstructions, parametricInstructions, variableInstructions, memoryInstructions, numericInstructions);
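Each entry produced by sign() pairs an opcode with its [inputTypes, outputTypes] tuple. A small lookup sketch, assuming the table is imported through the package index shown later in this diff:
const { signatures } = require("@webassemblyjs/ast");

const [inputs, outputs] = signatures["i32.add"];
// inputs  -> ["i32", "i32"]
// outputs -> ["i32"]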


@@ -1,76 +0,0 @@
var t = require("../../index"); // func and call_indirect instructions can either define a signature inline, or
// reference a signature, e.g.
//
// ;; inline signature
// (func (result i64)
// (i64.const 2)
// )
// ;; signature reference
// (type (func (result i64)))
// (func (type 0)
// (i64.const 2))
// )
//
// this AST transform denormalises the type references, making all signatures within the module
// inline.
export function transform(ast) {
var typeInstructions = [];
t.traverse(ast, {
TypeInstruction: function TypeInstruction(_ref) {
var node = _ref.node;
typeInstructions.push(node);
}
});
if (!typeInstructions.length) {
return;
}
function denormalizeSignature(signature) {
// signature referenced by identifier
if (signature.type === "Identifier") {
var identifier = signature;
var typeInstruction = typeInstructions.find(function (t) {
return t.id.type === identifier.type && t.id.value === identifier.value;
});
if (!typeInstruction) {
throw new Error("A type instruction reference was not found ".concat(JSON.stringify(signature)));
}
return typeInstruction.functype;
} // signature referenced by index
if (signature.type === "NumberLiteral") {
var signatureRef = signature;
var _typeInstruction = typeInstructions[signatureRef.value];
return _typeInstruction.functype;
}
return signature;
}
t.traverse(ast, {
Func: function (_Func) {
function Func(_x) {
return _Func.apply(this, arguments);
}
Func.toString = function () {
return _Func.toString();
};
return Func;
}(function (_ref2) {
var node = _ref2.node;
node.signature = denormalizeSignature(node.signature);
}),
CallIndirectInstruction: function CallIndirectInstruction(_ref3) {
var node = _ref3.node;
node.signature = denormalizeSignature(node.signature);
}
});
}
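A rough usage sketch of the transform above; the module path is an assumption made for illustration, and the WAT source reuses the example from the comment:
const { parse } = require("@webassemblyjs/wast-parser");
// Assumed path; the transform lives somewhere inside @webassemblyjs/ast.
const { transform } = require("@webassemblyjs/ast/lib/transform/denormalize-type-references");

const ast = parse(`
  (module
    (type (func (result i64)))
    (func (type 0)
      (i64.const 2)))
`);

transform(ast); // mutates the AST in place

// The Func node's signature is now the inline functype taken from the
// TypeInstruction rather than a NumberLiteral reference to type 0.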


@@ -1,216 +0,0 @@
function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
function _sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
function _slicedToArray(arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return _sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }
import { isBlock, isFunc, isIdentifier, numberLiteralFromRaw, traverse } from "../../index";
import { moduleContextFromModuleAST } from "@webassemblyjs/helper-module-context"; // FIXME(sven): do the same with all block instructions, must be more generic here
function newUnexpectedFunction(i) {
return new Error("unknown function at offset: " + i);
}
export function transform(ast) {
var module;
traverse(ast, {
Module: function (_Module) {
function Module(_x) {
return _Module.apply(this, arguments);
}
Module.toString = function () {
return _Module.toString();
};
return Module;
}(function (path) {
module = path.node;
})
});
var moduleContext = moduleContextFromModuleAST(module); // Transform the actual instruction in function bodies
traverse(ast, {
Func: function (_Func) {
function Func(_x2) {
return _Func.apply(this, arguments);
}
Func.toString = function () {
return _Func.toString();
};
return Func;
}(function (path) {
transformFuncPath(path, moduleContext);
}),
Start: function (_Start) {
function Start(_x3) {
return _Start.apply(this, arguments);
}
Start.toString = function () {
return _Start.toString();
};
return Start;
}(function (path) {
var index = path.node.index;
if (isIdentifier(index) === true) {
var offsetInModule = moduleContext.getFunctionOffsetByIdentifier(index.value);
if (typeof offsetInModule === "undefined") {
throw newUnexpectedFunction(index.value);
} // Replace the index Identifier
// $FlowIgnore: reference?
path.node.index = numberLiteralFromRaw(offsetInModule);
}
})
});
}
function transformFuncPath(funcPath, moduleContext) {
var funcNode = funcPath.node;
var signature = funcNode.signature;
if (signature.type !== "Signature") {
throw new Error("Function signatures must be denormalised before execution");
}
var params = signature.params; // Add func locals in the context
params.forEach(function (p) {
return moduleContext.addLocal(p.valtype);
});
traverse(funcNode, {
Instr: function (_Instr) {
function Instr(_x4) {
return _Instr.apply(this, arguments);
}
Instr.toString = function () {
return _Instr.toString();
};
return Instr;
}(function (instrPath) {
var instrNode = instrPath.node;
/**
* Local access
*/
if (instrNode.id === "get_local" || instrNode.id === "set_local" || instrNode.id === "tee_local") {
var _instrNode$args = _slicedToArray(instrNode.args, 1),
firstArg = _instrNode$args[0];
if (firstArg.type === "Identifier") {
var offsetInParams = params.findIndex(function (_ref) {
var id = _ref.id;
return id === firstArg.value;
});
if (offsetInParams === -1) {
throw new Error("".concat(firstArg.value, " not found in ").concat(instrNode.id, ": not declared in func params"));
} // Replace the Identifier node by our new NumberLiteral node
instrNode.args[0] = numberLiteralFromRaw(offsetInParams);
}
}
/**
* Global access
*/
if (instrNode.id === "get_global" || instrNode.id === "set_global") {
var _instrNode$args2 = _slicedToArray(instrNode.args, 1),
_firstArg = _instrNode$args2[0];
if (isIdentifier(_firstArg) === true) {
var globalOffset = moduleContext.getGlobalOffsetByIdentifier( // $FlowIgnore: reference?
_firstArg.value);
if (typeof globalOffset === "undefined") {
// $FlowIgnore: reference?
throw new Error("global ".concat(_firstArg.value, " not found in module"));
} // Replace the Identifier node by our new NumberLiteral node
instrNode.args[0] = numberLiteralFromRaw(globalOffset);
}
}
/**
* Labels lookup
*/
if (instrNode.id === "br") {
var _instrNode$args3 = _slicedToArray(instrNode.args, 1),
_firstArg2 = _instrNode$args3[0];
if (isIdentifier(_firstArg2) === true) {
// if the label is not found it is going to be replaced with -1
// which is invalid.
var relativeBlockCount = -1; // $FlowIgnore: reference?
instrPath.findParent(function (_ref2) {
var node = _ref2.node;
if (isBlock(node)) {
relativeBlockCount++; // $FlowIgnore: reference?
var name = node.label || node.name;
if (_typeof(name) === "object") {
// $FlowIgnore: isIdentifier ensures that
if (name.value === _firstArg2.value) {
// Found it
return false;
}
}
}
if (isFunc(node)) {
return false;
}
}); // Replace the Identifier node by our new NumberLiteral node
instrNode.args[0] = numberLiteralFromRaw(relativeBlockCount);
}
}
}),
/**
* Func lookup
*/
CallInstruction: function (_CallInstruction) {
function CallInstruction(_x5) {
return _CallInstruction.apply(this, arguments);
}
CallInstruction.toString = function () {
return _CallInstruction.toString();
};
return CallInstruction;
}(function (_ref3) {
var node = _ref3.node;
var index = node.index;
if (isIdentifier(index) === true) {
var offsetInModule = moduleContext.getFunctionOffsetByIdentifier(index.value);
if (typeof offsetInModule === "undefined") {
throw newUnexpectedFunction(index.value);
} // Replace the index Identifier
// $FlowIgnore: reference?
node.index = numberLiteralFromRaw(offsetInModule);
}
})
});
}
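A comparable sketch for this transform (path again assumed): once signatures are denormalised, identifiers that name functions, locals, globals, or labels are rewritten to numeric indices.
const { parse } = require("@webassemblyjs/wast-parser");
const { transform } = require("@webassemblyjs/ast/lib/transform/wast-identifier-to-index"); // assumed path

const ast = parse(`
  (module
    (func $answer (result i32)
      (i32.const 42))
    (func
      (call $answer)
      (drop)))
`);

transform(ast); // the CallInstruction's index $answer becomes NumberLiteral 0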


@@ -1,96 +0,0 @@
import { createPath } from "./node-path";
import { unionTypesMap, nodeAndUnionTypes } from "./nodes"; // recursively walks the AST starting at the given node. The callback is invoked for
// any object that has a 'type' property.
function walk(context, callback) {
var stop = false;
function innerWalk(context, callback) {
if (stop) {
return;
}
var node = context.node;
if (node === undefined) {
console.warn("traversing with an empty context");
return;
}
if (node._deleted === true) {
return;
}
var path = createPath(context);
callback(node.type, path);
if (path.shouldStop) {
stop = true;
return;
}
Object.keys(node).forEach(function (prop) {
var value = node[prop];
if (value === null || value === undefined) {
return;
}
var valueAsArray = Array.isArray(value) ? value : [value];
valueAsArray.forEach(function (childNode) {
if (typeof childNode.type === "string") {
var childContext = {
node: childNode,
parentKey: prop,
parentPath: path,
shouldStop: false,
inList: Array.isArray(value)
};
innerWalk(childContext, callback);
}
});
});
}
innerWalk(context, callback);
}
var noop = function noop() {};
export function traverse(node, visitors) {
var before = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : noop;
var after = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : noop;
Object.keys(visitors).forEach(function (visitor) {
if (!nodeAndUnionTypes.includes(visitor)) {
throw new Error("Unexpected visitor ".concat(visitor));
}
});
var context = {
node: node,
inList: false,
shouldStop: false,
parentPath: null,
parentKey: null
};
walk(context, function (type, path) {
if (typeof visitors[type] === "function") {
before(type, path);
visitors[type](path);
after(type, path);
}
var unionTypes = unionTypesMap[type];
if (!unionTypes) {
throw new Error("Unexpected node type ".concat(type));
}
unionTypes.forEach(function (unionType) {
if (typeof visitors[unionType] === "function") {
before(unionType, path);
visitors[unionType](path);
after(unionType, path);
}
});
});
}
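For example, a visitor can be keyed by a concrete type such as Instr or by one of the union types validated above. A brief sketch, assuming the AST comes from the wast parser:
const { parse } = require("@webassemblyjs/wast-parser");
const { traverse } = require("@webassemblyjs/ast");

const ast = parse("(module (func (i32.const 1) (drop)))");

traverse(ast, {
  Instr(path) {
    // path wraps the node together with parentPath/parentKey and the node-path helpers
    console.log(path.node.id); // "const", then "drop"
  },
});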


@@ -1,265 +0,0 @@
function _sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
function _slicedToArray(arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return _sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }
function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
import { signatures } from "./signatures";
import { traverse } from "./traverse";
import constants from "@webassemblyjs/helper-wasm-bytecode";
import { getSectionForNode } from "@webassemblyjs/helper-wasm-bytecode";
export function isAnonymous(ident) {
return ident.raw === "";
}
export function getSectionMetadata(ast, name) {
var section;
traverse(ast, {
SectionMetadata: function (_SectionMetadata) {
function SectionMetadata(_x) {
return _SectionMetadata.apply(this, arguments);
}
SectionMetadata.toString = function () {
return _SectionMetadata.toString();
};
return SectionMetadata;
}(function (_ref) {
var node = _ref.node;
if (node.section === name) {
section = node;
}
})
});
return section;
}
export function getSectionMetadatas(ast, name) {
var sections = [];
traverse(ast, {
SectionMetadata: function (_SectionMetadata2) {
function SectionMetadata(_x2) {
return _SectionMetadata2.apply(this, arguments);
}
SectionMetadata.toString = function () {
return _SectionMetadata2.toString();
};
return SectionMetadata;
}(function (_ref2) {
var node = _ref2.node;
if (node.section === name) {
sections.push(node);
}
})
});
return sections;
}
export function sortSectionMetadata(m) {
if (m.metadata == null) {
console.warn("sortSectionMetadata: no metadata to sort");
return;
} // $FlowIgnore
m.metadata.sections.sort(function (a, b) {
var aId = constants.sections[a.section];
var bId = constants.sections[b.section];
if (typeof aId !== "number" || typeof bId !== "number") {
throw new Error("Section id not found");
}
return aId - bId;
});
}
export function orderedInsertNode(m, n) {
assertHasLoc(n);
var didInsert = false;
if (n.type === "ModuleExport") {
m.fields.push(n);
return;
}
m.fields = m.fields.reduce(function (acc, field) {
var fieldEndCol = Infinity;
if (field.loc != null) {
// $FlowIgnore
fieldEndCol = field.loc.end.column;
} // $FlowIgnore: assertHasLoc ensures that
if (didInsert === false && n.loc.start.column < fieldEndCol) {
didInsert = true;
acc.push(n);
}
acc.push(field);
return acc;
}, []); // Handles empty modules or n is the last element
if (didInsert === false) {
m.fields.push(n);
}
}
export function assertHasLoc(n) {
if (n.loc == null || n.loc.start == null || n.loc.end == null) {
throw new Error("Internal failure: node (".concat(JSON.stringify(n.type), ") has no location information"));
}
}
export function getEndOfSection(s) {
assertHasLoc(s.size);
return s.startOffset + s.size.value + ( // $FlowIgnore
s.size.loc.end.column - s.size.loc.start.column);
}
export function shiftLoc(node, delta) {
// $FlowIgnore
node.loc.start.column += delta; // $FlowIgnore
node.loc.end.column += delta;
}
export function shiftSection(ast, node, delta) {
if (node.type !== "SectionMetadata") {
throw new Error("Can not shift node " + JSON.stringify(node.type));
}
node.startOffset += delta;
if (_typeof(node.size.loc) === "object") {
shiftLoc(node.size, delta);
} // Custom sections don't have vectorOfSize
if (_typeof(node.vectorOfSize) === "object" && _typeof(node.vectorOfSize.loc) === "object") {
shiftLoc(node.vectorOfSize, delta);
}
var sectionName = node.section; // shift node locations within that section
traverse(ast, {
Node: function Node(_ref3) {
var node = _ref3.node;
var section = getSectionForNode(node);
if (section === sectionName && _typeof(node.loc) === "object") {
shiftLoc(node, delta);
}
}
});
}
export function signatureForOpcode(object, name) {
var opcodeName = name;
if (object !== undefined && object !== "") {
opcodeName = object + "." + name;
}
var sign = signatures[opcodeName];
if (sign == undefined) {
// TODO: Uncomment this when br_table and others has been done
//throw new Error("Invalid opcode: "+opcodeName);
return [object, object];
}
return sign[0];
}
export function getUniqueNameGenerator() {
var inc = {};
return function () {
var prefix = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : "temp";
if (!(prefix in inc)) {
inc[prefix] = 0;
} else {
inc[prefix] = inc[prefix] + 1;
}
return prefix + "_" + inc[prefix];
};
}
export function getStartByteOffset(n) {
// $FlowIgnore
if (typeof n.loc === "undefined" || typeof n.loc.start === "undefined") {
throw new Error( // $FlowIgnore
"Can not get byte offset without loc informations, node: " + String(n.id));
}
return n.loc.start.column;
}
export function getEndByteOffset(n) {
// $FlowIgnore
if (typeof n.loc === "undefined" || typeof n.loc.end === "undefined") {
throw new Error("Can not get byte offset without loc informations, node: " + n.type);
}
return n.loc.end.column;
}
export function getFunctionBeginingByteOffset(n) {
if (!(n.body.length > 0)) {
throw new Error('n.body.length > 0' + " error: " + (undefined || "unknown"));
}
var _n$body = _slicedToArray(n.body, 1),
firstInstruction = _n$body[0];
return getStartByteOffset(firstInstruction);
}
export function getEndBlockByteOffset(n) {
// $FlowIgnore
if (!(n.instr.length > 0 || n.body.length > 0)) {
throw new Error('n.instr.length > 0 || n.body.length > 0' + " error: " + (undefined || "unknown"));
}
var lastInstruction;
if (n.instr) {
// $FlowIgnore
lastInstruction = n.instr[n.instr.length - 1];
}
if (n.body) {
// $FlowIgnore
lastInstruction = n.body[n.body.length - 1];
}
if (!(_typeof(lastInstruction) === "object")) {
throw new Error('typeof lastInstruction === "object"' + " error: " + (undefined || "unknown"));
}
// $FlowIgnore
return getStartByteOffset(lastInstruction);
}
export function getStartBlockByteOffset(n) {
// $FlowIgnore
if (!(n.instr.length > 0 || n.body.length > 0)) {
throw new Error('n.instr.length > 0 || n.body.length > 0' + " error: " + (undefined || "unknown"));
}
var fistInstruction;
if (n.instr) {
// $FlowIgnore
var _n$instr = _slicedToArray(n.instr, 1);
fistInstruction = _n$instr[0];
}
if (n.body) {
// $FlowIgnore
var _n$body2 = _slicedToArray(n.body, 1);
fistInstruction = _n$body2[0];
}
if (!(_typeof(fistInstruction) === "object")) {
throw new Error('typeof fistInstruction === "object"' + " error: " + (undefined || "unknown"));
}
// $FlowIgnore
return getStartByteOffset(fistInstruction);
}
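As one concrete example, the unique-name generator defined above yields prefixed, incrementing names:
const { getUniqueNameGenerator } = require("@webassemblyjs/ast");

const getUniqueName = getUniqueNameGenerator();
getUniqueName();        // "temp_0"
getUniqueName("label"); // "label_0"
getUniqueName("label"); // "label_1"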


@@ -1,17 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.cloneNode = cloneNode;
function cloneNode(n) {
// $FlowIgnore
var newObj = {};
for (var k in n) {
newObj[k] = n[k];
}
return newObj;
}
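Note that this is a shallow copy: top-level keys are copied while nested objects (loc, args, and so on) stay shared with the original node. A quick sketch using numberLiteralFromRaw, which the package index re-exports:
const { cloneNode, numberLiteralFromRaw } = require("@webassemblyjs/ast");

const original = numberLiteralFromRaw(42);
const copy = cloneNode(original);

copy.value = 7;
// original.value is still 42, and copy !== original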


@@ -1,663 +0,0 @@
var definitions = {};
function defineType(typeName, metadata) {
definitions[typeName] = metadata;
}
defineType("Module", {
spec: {
wasm: "https://webassembly.github.io/spec/core/binary/modules.html#binary-module",
wat: "https://webassembly.github.io/spec/core/text/modules.html#text-module"
},
doc: "A module consists of a sequence of sections (termed fields in the text format).",
unionType: ["Node"],
fields: {
id: {
maybe: true,
type: "string"
},
fields: {
array: true,
type: "Node"
},
metadata: {
optional: true,
type: "ModuleMetadata"
}
}
});
defineType("ModuleMetadata", {
unionType: ["Node"],
fields: {
sections: {
array: true,
type: "SectionMetadata"
},
functionNames: {
optional: true,
array: true,
type: "FunctionNameMetadata"
},
localNames: {
optional: true,
array: true,
type: "ModuleMetadata"
},
producers: {
optional: true,
array: true,
type: "ProducersSectionMetadata"
}
}
});
defineType("ModuleNameMetadata", {
unionType: ["Node"],
fields: {
value: {
type: "string"
}
}
});
defineType("FunctionNameMetadata", {
unionType: ["Node"],
fields: {
value: {
type: "string"
},
index: {
type: "number"
}
}
});
defineType("LocalNameMetadata", {
unionType: ["Node"],
fields: {
value: {
type: "string"
},
localIndex: {
type: "number"
},
functionIndex: {
type: "number"
}
}
});
defineType("BinaryModule", {
unionType: ["Node"],
fields: {
id: {
maybe: true,
type: "string"
},
blob: {
array: true,
type: "string"
}
}
});
defineType("QuoteModule", {
unionType: ["Node"],
fields: {
id: {
maybe: true,
type: "string"
},
string: {
array: true,
type: "string"
}
}
});
defineType("SectionMetadata", {
unionType: ["Node"],
fields: {
section: {
type: "SectionName"
},
startOffset: {
type: "number"
},
size: {
type: "NumberLiteral"
},
vectorOfSize: {
comment: "Size of the vector in the section (if any)",
type: "NumberLiteral"
}
}
});
defineType("ProducersSectionMetadata", {
unionType: ["Node"],
fields: {
producers: {
array: true,
type: "ProducerMetadata"
}
}
});
defineType("ProducerMetadata", {
unionType: ["Node"],
fields: {
language: {
type: "ProducerMetadataVersionedName",
array: true
},
processedBy: {
type: "ProducerMetadataVersionedName",
array: true
},
sdk: {
type: "ProducerMetadataVersionedName",
array: true
}
}
});
defineType("ProducerMetadataVersionedName", {
unionType: ["Node"],
fields: {
name: {
type: "string"
},
version: {
type: "string"
}
}
});
/*
Instructions
*/
defineType("LoopInstruction", {
unionType: ["Node", "Block", "Instruction"],
fields: {
id: {
constant: true,
type: "string",
value: "loop"
},
label: {
maybe: true,
type: "Identifier"
},
resulttype: {
maybe: true,
type: "Valtype"
},
instr: {
array: true,
type: "Instruction"
}
}
});
defineType("Instr", {
unionType: ["Node", "Expression", "Instruction"],
fields: {
id: {
type: "string"
},
object: {
optional: true,
type: "Valtype"
},
args: {
array: true,
type: "Expression"
},
namedArgs: {
optional: true,
type: "Object"
}
}
});
defineType("IfInstruction", {
unionType: ["Node", "Instruction"],
fields: {
id: {
constant: true,
type: "string",
value: "if"
},
testLabel: {
comment: "only for WAST",
type: "Identifier"
},
test: {
array: true,
type: "Instruction"
},
result: {
maybe: true,
type: "Valtype"
},
consequent: {
array: true,
type: "Instruction"
},
alternate: {
array: true,
type: "Instruction"
}
}
});
/*
Concrete value types
*/
defineType("StringLiteral", {
unionType: ["Node", "Expression"],
fields: {
value: {
type: "string"
}
}
});
defineType("NumberLiteral", {
unionType: ["Node", "NumericLiteral", "Expression"],
fields: {
value: {
type: "number"
},
raw: {
type: "string"
}
}
});
defineType("LongNumberLiteral", {
unionType: ["Node", "NumericLiteral", "Expression"],
fields: {
value: {
type: "LongNumber"
},
raw: {
type: "string"
}
}
});
defineType("FloatLiteral", {
unionType: ["Node", "NumericLiteral", "Expression"],
fields: {
value: {
type: "number"
},
nan: {
optional: true,
type: "boolean"
},
inf: {
optional: true,
type: "boolean"
},
raw: {
type: "string"
}
}
});
defineType("Elem", {
unionType: ["Node"],
fields: {
table: {
type: "Index"
},
offset: {
array: true,
type: "Instruction"
},
funcs: {
array: true,
type: "Index"
}
}
});
defineType("IndexInFuncSection", {
unionType: ["Node"],
fields: {
index: {
type: "Index"
}
}
});
defineType("ValtypeLiteral", {
unionType: ["Node", "Expression"],
fields: {
name: {
type: "Valtype"
}
}
});
defineType("TypeInstruction", {
unionType: ["Node", "Instruction"],
fields: {
id: {
maybe: true,
type: "Index"
},
functype: {
type: "Signature"
}
}
});
defineType("Start", {
unionType: ["Node"],
fields: {
index: {
type: "Index"
}
}
});
defineType("GlobalType", {
unionType: ["Node", "ImportDescr"],
fields: {
valtype: {
type: "Valtype"
},
mutability: {
type: "Mutability"
}
}
});
defineType("LeadingComment", {
unionType: ["Node"],
fields: {
value: {
type: "string"
}
}
});
defineType("BlockComment", {
unionType: ["Node"],
fields: {
value: {
type: "string"
}
}
});
defineType("Data", {
unionType: ["Node"],
fields: {
memoryIndex: {
type: "Memidx"
},
offset: {
type: "Instruction"
},
init: {
type: "ByteArray"
}
}
});
defineType("Global", {
unionType: ["Node"],
fields: {
globalType: {
type: "GlobalType"
},
init: {
array: true,
type: "Instruction"
},
name: {
maybe: true,
type: "Identifier"
}
}
});
defineType("Table", {
unionType: ["Node", "ImportDescr"],
fields: {
elementType: {
type: "TableElementType"
},
limits: {
assertNodeType: true,
type: "Limit"
},
name: {
maybe: true,
type: "Identifier"
},
elements: {
array: true,
optional: true,
type: "Index"
}
}
});
defineType("Memory", {
unionType: ["Node", "ImportDescr"],
fields: {
limits: {
type: "Limit"
},
id: {
maybe: true,
type: "Index"
}
}
});
defineType("FuncImportDescr", {
unionType: ["Node", "ImportDescr"],
fields: {
id: {
type: "Identifier"
},
signature: {
type: "Signature"
}
}
});
defineType("ModuleImport", {
unionType: ["Node"],
fields: {
module: {
type: "string"
},
name: {
type: "string"
},
descr: {
type: "ImportDescr"
}
}
});
defineType("ModuleExportDescr", {
unionType: ["Node"],
fields: {
exportType: {
type: "ExportDescrType"
},
id: {
type: "Index"
}
}
});
defineType("ModuleExport", {
unionType: ["Node"],
fields: {
name: {
type: "string"
},
descr: {
type: "ModuleExportDescr"
}
}
});
defineType("Limit", {
unionType: ["Node"],
fields: {
min: {
type: "number"
},
max: {
optional: true,
type: "number"
}
}
});
defineType("Signature", {
unionType: ["Node"],
fields: {
params: {
array: true,
type: "FuncParam"
},
results: {
array: true,
type: "Valtype"
}
}
});
defineType("Program", {
unionType: ["Node"],
fields: {
body: {
array: true,
type: "Node"
}
}
});
defineType("Identifier", {
unionType: ["Node", "Expression"],
fields: {
value: {
type: "string"
},
raw: {
optional: true,
type: "string"
}
}
});
defineType("BlockInstruction", {
unionType: ["Node", "Block", "Instruction"],
fields: {
id: {
constant: true,
type: "string",
value: "block"
},
label: {
maybe: true,
type: "Identifier"
},
instr: {
array: true,
type: "Instruction"
},
result: {
maybe: true,
type: "Valtype"
}
}
});
defineType("CallInstruction", {
unionType: ["Node", "Instruction"],
fields: {
id: {
constant: true,
type: "string",
value: "call"
},
index: {
type: "Index"
},
instrArgs: {
array: true,
optional: true,
type: "Expression"
},
numeric: {
type: "Index",
optional: true
}
}
});
defineType("CallIndirectInstruction", {
unionType: ["Node", "Instruction"],
fields: {
id: {
constant: true,
type: "string",
value: "call_indirect"
},
signature: {
type: "SignatureOrTypeRef"
},
intrs: {
array: true,
optional: true,
type: "Expression"
}
}
});
defineType("ByteArray", {
unionType: ["Node"],
fields: {
values: {
array: true,
type: "Byte"
}
}
});
defineType("Func", {
unionType: ["Node", "Block"],
fields: {
name: {
maybe: true,
type: "Index"
},
signature: {
type: "SignatureOrTypeRef"
},
body: {
array: true,
type: "Instruction"
},
isExternal: {
comment: "means that it has been imported from the outside js",
optional: true,
type: "boolean"
},
metadata: {
optional: true,
type: "FuncMetadata"
}
}
});
/**
* Intrinsics
*/
defineType("InternalBrUnless", {
unionType: ["Node", "Intrinsic"],
fields: {
target: {
type: "number"
}
}
});
defineType("InternalGoto", {
unionType: ["Node", "Intrinsic"],
fields: {
target: {
type: "number"
}
}
});
defineType("InternalCallExtern", {
unionType: ["Node", "Intrinsic"],
fields: {
target: {
type: "number"
}
}
}); // function bodies are terminated by an `end` instruction but are missing a
// return instruction
//
// Since we can't inject a plain return instruction, we inject a new intrinsic instruction instead.
defineType("InternalEndAndReturn", {
unionType: ["Node", "Intrinsic"],
fields: {}
});
module.exports = definitions;
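These definitions are plain metadata keyed by node type, with field kinds such as maybe, optional, array, and constant; the builders in nodes.js presumably mirror them. For example (path assumed):
const definitions = require("@webassemblyjs/ast/lib/definitions"); // assumed path

definitions.CallInstruction.fields.id.value; // "call" (a constant field)
definitions.Func.unionType;                  // ["Node", "Block"]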


@@ -1,118 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
var _exportNames = {
numberLiteralFromRaw: true,
withLoc: true,
withRaw: true,
funcParam: true,
indexLiteral: true,
memIndexLiteral: true,
instruction: true,
objectInstruction: true,
traverse: true,
signatures: true,
cloneNode: true
};
Object.defineProperty(exports, "numberLiteralFromRaw", {
enumerable: true,
get: function get() {
return _nodeHelpers.numberLiteralFromRaw;
}
});
Object.defineProperty(exports, "withLoc", {
enumerable: true,
get: function get() {
return _nodeHelpers.withLoc;
}
});
Object.defineProperty(exports, "withRaw", {
enumerable: true,
get: function get() {
return _nodeHelpers.withRaw;
}
});
Object.defineProperty(exports, "funcParam", {
enumerable: true,
get: function get() {
return _nodeHelpers.funcParam;
}
});
Object.defineProperty(exports, "indexLiteral", {
enumerable: true,
get: function get() {
return _nodeHelpers.indexLiteral;
}
});
Object.defineProperty(exports, "memIndexLiteral", {
enumerable: true,
get: function get() {
return _nodeHelpers.memIndexLiteral;
}
});
Object.defineProperty(exports, "instruction", {
enumerable: true,
get: function get() {
return _nodeHelpers.instruction;
}
});
Object.defineProperty(exports, "objectInstruction", {
enumerable: true,
get: function get() {
return _nodeHelpers.objectInstruction;
}
});
Object.defineProperty(exports, "traverse", {
enumerable: true,
get: function get() {
return _traverse.traverse;
}
});
Object.defineProperty(exports, "signatures", {
enumerable: true,
get: function get() {
return _signatures.signatures;
}
});
Object.defineProperty(exports, "cloneNode", {
enumerable: true,
get: function get() {
return _clone.cloneNode;
}
});
var _nodes = require("./nodes");
Object.keys(_nodes).forEach(function (key) {
if (key === "default" || key === "__esModule") return;
if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return;
Object.defineProperty(exports, key, {
enumerable: true,
get: function get() {
return _nodes[key];
}
});
});
var _nodeHelpers = require("./node-helpers.js");
var _traverse = require("./traverse");
var _signatures = require("./signatures");
var _utils = require("./utils");
Object.keys(_utils).forEach(function (key) {
if (key === "default" || key === "__esModule") return;
if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return;
Object.defineProperty(exports, key, {
enumerable: true,
get: function get() {
return _utils[key];
}
});
});
var _clone = require("./clone");


@@ -1,107 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.numberLiteralFromRaw = numberLiteralFromRaw;
exports.instruction = instruction;
exports.objectInstruction = objectInstruction;
exports.withLoc = withLoc;
exports.withRaw = withRaw;
exports.funcParam = funcParam;
exports.indexLiteral = indexLiteral;
exports.memIndexLiteral = memIndexLiteral;
var _wastParser = require("@webassemblyjs/wast-parser");
var _nodes = require("./nodes");
function numberLiteralFromRaw(rawValue) {
var instructionType = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : "i32";
var original = rawValue; // Remove numeric separators _
if (typeof rawValue === "string") {
rawValue = rawValue.replace(/_/g, "");
}
if (typeof rawValue === "number") {
return (0, _nodes.numberLiteral)(rawValue, String(original));
} else {
switch (instructionType) {
case "i32":
{
return (0, _nodes.numberLiteral)((0, _wastParser.parse32I)(rawValue), String(original));
}
case "u32":
{
return (0, _nodes.numberLiteral)((0, _wastParser.parseU32)(rawValue), String(original));
}
case "i64":
{
return (0, _nodes.longNumberLiteral)((0, _wastParser.parse64I)(rawValue), String(original));
}
case "f32":
{
return (0, _nodes.floatLiteral)((0, _wastParser.parse32F)(rawValue), (0, _wastParser.isNanLiteral)(rawValue), (0, _wastParser.isInfLiteral)(rawValue), String(original));
}
// f64
default:
{
return (0, _nodes.floatLiteral)((0, _wastParser.parse64F)(rawValue), (0, _wastParser.isNanLiteral)(rawValue), (0, _wastParser.isInfLiteral)(rawValue), String(original));
}
}
}
}
function instruction(id) {
var args = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : [];
var namedArgs = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
return (0, _nodes.instr)(id, undefined, args, namedArgs);
}
function objectInstruction(id, object) {
var args = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : [];
var namedArgs = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};
return (0, _nodes.instr)(id, object, args, namedArgs);
}
/**
* Decorators
*/
function withLoc(n, end, start) {
var loc = {
start: start,
end: end
};
n.loc = loc;
return n;
}
function withRaw(n, raw) {
n.raw = raw;
return n;
}
function funcParam(valtype, id) {
return {
id: id,
valtype: valtype
};
}
function indexLiteral(value) {
// $FlowIgnore
var x = numberLiteralFromRaw(value, "u32");
return x;
}
function memIndexLiteral(value) {
// $FlowIgnore
var x = numberLiteralFromRaw(value, "u32");
return x;
}
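A short sketch of these helpers in use; the resulting node shapes follow the builders in nodes.js:
const { numberLiteralFromRaw, instruction, objectInstruction } = require("@webassemblyjs/ast");

numberLiteralFromRaw("1_000"); // NumberLiteral { value: 1000, raw: "1_000" } - numeric separators stripped

objectInstruction("const", "i32", [numberLiteralFromRaw(1)]); // an i32.const 1 Instr node
instruction("drop");                                          // a bare drop Instr node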


@@ -1,144 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.createPath = createPath;
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
function findParent(_ref, cb) {
var parentPath = _ref.parentPath;
if (parentPath == null) {
throw new Error("node is root");
}
var currentPath = parentPath;
while (cb(currentPath) !== false) {
// Hit the root node, stop
// $FlowIgnore
if (currentPath.parentPath == null) {
return null;
} // $FlowIgnore
currentPath = currentPath.parentPath;
}
return currentPath.node;
}
function insertBefore(context, newNode) {
return insert(context, newNode);
}
function insertAfter(context, newNode) {
return insert(context, newNode, 1);
}
function insert(_ref2, newNode) {
var node = _ref2.node,
inList = _ref2.inList,
parentPath = _ref2.parentPath,
parentKey = _ref2.parentKey;
var indexOffset = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
if (!inList) {
throw new Error('inList' + " error: " + ("insert can only be used for nodes that are within lists" || "unknown"));
}
if (!(parentPath != null)) {
throw new Error('parentPath != null' + " error: " + ("Can not remove root node" || "unknown"));
}
// $FlowIgnore
var parentList = parentPath.node[parentKey];
var indexInList = parentList.findIndex(function (n) {
return n === node;
});
parentList.splice(indexInList + indexOffset, 0, newNode);
}
function remove(_ref3) {
var node = _ref3.node,
parentKey = _ref3.parentKey,
parentPath = _ref3.parentPath;
if (!(parentPath != null)) {
throw new Error('parentPath != null' + " error: " + ("Can not remove root node" || "unknown"));
}
// $FlowIgnore
var parentNode = parentPath.node; // $FlowIgnore
var parentProperty = parentNode[parentKey];
if (Array.isArray(parentProperty)) {
// $FlowIgnore
parentNode[parentKey] = parentProperty.filter(function (n) {
return n !== node;
});
} else {
// $FlowIgnore
delete parentNode[parentKey];
}
node._deleted = true;
}
function stop(context) {
context.shouldStop = true;
}
function replaceWith(context, newNode) {
// $FlowIgnore
var parentNode = context.parentPath.node; // $FlowIgnore
var parentProperty = parentNode[context.parentKey];
if (Array.isArray(parentProperty)) {
var indexInList = parentProperty.findIndex(function (n) {
return n === context.node;
});
parentProperty.splice(indexInList, 1, newNode);
} else {
// $FlowIgnore
parentNode[context.parentKey] = newNode;
}
context.node._deleted = true;
context.node = newNode;
} // bind the context to the first argument of node operations
function bindNodeOperations(operations, context) {
var keys = Object.keys(operations);
var boundOperations = {};
keys.forEach(function (key) {
boundOperations[key] = operations[key].bind(null, context);
});
return boundOperations;
}
function createPathOperations(context) {
// $FlowIgnore
return bindNodeOperations({
findParent: findParent,
replaceWith: replaceWith,
remove: remove,
insertBefore: insertBefore,
insertAfter: insertAfter,
stop: stop
}, context);
}
function createPath(context) {
var path = _extends({}, context); // $FlowIgnore
Object.assign(path, createPathOperations(path)); // $FlowIgnore
return path;
}
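These operations are bound onto the path objects handed to traverse visitors. A sketch of the two most common ones, assuming ast is a previously parsed Program:
const { traverse, instruction } = require("@webassemblyjs/ast");

traverse(ast, {
  Instr(path) {
    if (path.node.id === "nop") {
      path.replaceWith(instruction("unreachable")); // swap the node within its parent
    } else if (path.node.id === "drop") {
      path.remove(); // detach the node and mark it _deleted
    }
  },
});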

File diff suppressed because it is too large


@@ -1,207 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.signatures = void 0;
function sign(input, output) {
return [input, output];
}
var u32 = "u32";
var i32 = "i32";
var i64 = "i64";
var f32 = "f32";
var f64 = "f64";
var vector = function vector(t) {
var vecType = [t]; // $FlowIgnore
vecType.vector = true;
return vecType;
};
var controlInstructions = {
unreachable: sign([], []),
nop: sign([], []),
// block ?
// loop ?
// if ?
// if else ?
br: sign([u32], []),
br_if: sign([u32], []),
br_table: sign(vector(u32), []),
return: sign([], []),
call: sign([u32], []),
call_indirect: sign([u32], [])
};
var parametricInstructions = {
drop: sign([], []),
select: sign([], [])
};
var variableInstructions = {
get_local: sign([u32], []),
set_local: sign([u32], []),
tee_local: sign([u32], []),
get_global: sign([u32], []),
set_global: sign([u32], [])
};
var memoryInstructions = {
"i32.load": sign([u32, u32], [i32]),
"i64.load": sign([u32, u32], []),
"f32.load": sign([u32, u32], []),
"f64.load": sign([u32, u32], []),
"i32.load8_s": sign([u32, u32], [i32]),
"i32.load8_u": sign([u32, u32], [i32]),
"i32.load16_s": sign([u32, u32], [i32]),
"i32.load16_u": sign([u32, u32], [i32]),
"i64.load8_s": sign([u32, u32], [i64]),
"i64.load8_u": sign([u32, u32], [i64]),
"i64.load16_s": sign([u32, u32], [i64]),
"i64.load16_u": sign([u32, u32], [i64]),
"i64.load32_s": sign([u32, u32], [i64]),
"i64.load32_u": sign([u32, u32], [i64]),
"i32.store": sign([u32, u32], []),
"i64.store": sign([u32, u32], []),
"f32.store": sign([u32, u32], []),
"f64.store": sign([u32, u32], []),
"i32.store8": sign([u32, u32], []),
"i32.store16": sign([u32, u32], []),
"i64.store8": sign([u32, u32], []),
"i64.store16": sign([u32, u32], []),
"i64.store32": sign([u32, u32], []),
current_memory: sign([], []),
grow_memory: sign([], [])
};
var numericInstructions = {
"i32.const": sign([i32], [i32]),
"i64.const": sign([i64], [i64]),
"f32.const": sign([f32], [f32]),
"f64.const": sign([f64], [f64]),
"i32.eqz": sign([i32], [i32]),
"i32.eq": sign([i32, i32], [i32]),
"i32.ne": sign([i32, i32], [i32]),
"i32.lt_s": sign([i32, i32], [i32]),
"i32.lt_u": sign([i32, i32], [i32]),
"i32.gt_s": sign([i32, i32], [i32]),
"i32.gt_u": sign([i32, i32], [i32]),
"i32.le_s": sign([i32, i32], [i32]),
"i32.le_u": sign([i32, i32], [i32]),
"i32.ge_s": sign([i32, i32], [i32]),
"i32.ge_u": sign([i32, i32], [i32]),
"i64.eqz": sign([i64], [i64]),
"i64.eq": sign([i64, i64], [i32]),
"i64.ne": sign([i64, i64], [i32]),
"i64.lt_s": sign([i64, i64], [i32]),
"i64.lt_u": sign([i64, i64], [i32]),
"i64.gt_s": sign([i64, i64], [i32]),
"i64.gt_u": sign([i64, i64], [i32]),
"i64.le_s": sign([i64, i64], [i32]),
"i64.le_u": sign([i64, i64], [i32]),
"i64.ge_s": sign([i64, i64], [i32]),
"i64.ge_u": sign([i64, i64], [i32]),
"f32.eq": sign([f32, f32], [i32]),
"f32.ne": sign([f32, f32], [i32]),
"f32.lt": sign([f32, f32], [i32]),
"f32.gt": sign([f32, f32], [i32]),
"f32.le": sign([f32, f32], [i32]),
"f32.ge": sign([f32, f32], [i32]),
"f64.eq": sign([f64, f64], [i32]),
"f64.ne": sign([f64, f64], [i32]),
"f64.lt": sign([f64, f64], [i32]),
"f64.gt": sign([f64, f64], [i32]),
"f64.le": sign([f64, f64], [i32]),
"f64.ge": sign([f64, f64], [i32]),
"i32.clz": sign([i32], [i32]),
"i32.ctz": sign([i32], [i32]),
"i32.popcnt": sign([i32], [i32]),
"i32.add": sign([i32, i32], [i32]),
"i32.sub": sign([i32, i32], [i32]),
"i32.mul": sign([i32, i32], [i32]),
"i32.div_s": sign([i32, i32], [i32]),
"i32.div_u": sign([i32, i32], [i32]),
"i32.rem_s": sign([i32, i32], [i32]),
"i32.rem_u": sign([i32, i32], [i32]),
"i32.and": sign([i32, i32], [i32]),
"i32.or": sign([i32, i32], [i32]),
"i32.xor": sign([i32, i32], [i32]),
"i32.shl": sign([i32, i32], [i32]),
"i32.shr_s": sign([i32, i32], [i32]),
"i32.shr_u": sign([i32, i32], [i32]),
"i32.rotl": sign([i32, i32], [i32]),
"i32.rotr": sign([i32, i32], [i32]),
"i64.clz": sign([i64], [i64]),
"i64.ctz": sign([i64], [i64]),
"i64.popcnt": sign([i64], [i64]),
"i64.add": sign([i64, i64], [i64]),
"i64.sub": sign([i64, i64], [i64]),
"i64.mul": sign([i64, i64], [i64]),
"i64.div_s": sign([i64, i64], [i64]),
"i64.div_u": sign([i64, i64], [i64]),
"i64.rem_s": sign([i64, i64], [i64]),
"i64.rem_u": sign([i64, i64], [i64]),
"i64.and": sign([i64, i64], [i64]),
"i64.or": sign([i64, i64], [i64]),
"i64.xor": sign([i64, i64], [i64]),
"i64.shl": sign([i64, i64], [i64]),
"i64.shr_s": sign([i64, i64], [i64]),
"i64.shr_u": sign([i64, i64], [i64]),
"i64.rotl": sign([i64, i64], [i64]),
"i64.rotr": sign([i64, i64], [i64]),
"f32.abs": sign([f32], [f32]),
"f32.neg": sign([f32], [f32]),
"f32.ceil": sign([f32], [f32]),
"f32.floor": sign([f32], [f32]),
"f32.trunc": sign([f32], [f32]),
"f32.nearest": sign([f32], [f32]),
"f32.sqrt": sign([f32], [f32]),
"f32.add": sign([f32, f32], [f32]),
"f32.sub": sign([f32, f32], [f32]),
"f32.mul": sign([f32, f32], [f32]),
"f32.div": sign([f32, f32], [f32]),
"f32.min": sign([f32, f32], [f32]),
"f32.max": sign([f32, f32], [f32]),
"f32.copysign": sign([f32, f32], [f32]),
"f64.abs": sign([f64], [f64]),
"f64.neg": sign([f64], [f64]),
"f64.ceil": sign([f64], [f64]),
"f64.floor": sign([f64], [f64]),
"f64.trunc": sign([f64], [f64]),
"f64.nearest": sign([f64], [f64]),
"f64.sqrt": sign([f64], [f64]),
"f64.add": sign([f64, f64], [f64]),
"f64.sub": sign([f64, f64], [f64]),
"f64.mul": sign([f64, f64], [f64]),
"f64.div": sign([f64, f64], [f64]),
"f64.min": sign([f64, f64], [f64]),
"f64.max": sign([f64, f64], [f64]),
"f64.copysign": sign([f64, f64], [f64]),
"i32.wrap/i64": sign([i64], [i32]),
"i32.trunc_s/f32": sign([f32], [i32]),
"i32.trunc_u/f32": sign([f32], [i32]),
"i32.trunc_s/f64": sign([f32], [i32]),
"i32.trunc_u/f64": sign([f64], [i32]),
"i64.extend_s/i32": sign([i32], [i64]),
"i64.extend_u/i32": sign([i32], [i64]),
"i64.trunc_s/f32": sign([f32], [i64]),
"i64.trunc_u/f32": sign([f32], [i64]),
"i64.trunc_s/f64": sign([f64], [i64]),
"i64.trunc_u/f64": sign([f64], [i64]),
"f32.convert_s/i32": sign([i32], [f32]),
"f32.convert_u/i32": sign([i32], [f32]),
"f32.convert_s/i64": sign([i64], [f32]),
"f32.convert_u/i64": sign([i64], [f32]),
"f32.demote/f64": sign([f64], [f32]),
"f64.convert_s/i32": sign([i32], [f64]),
"f64.convert_u/i32": sign([i32], [f64]),
"f64.convert_s/i64": sign([i64], [f64]),
"f64.convert_u/i64": sign([i64], [f64]),
"f64.promote/f32": sign([f32], [f64]),
"i32.reinterpret/f32": sign([f32], [i32]),
"i64.reinterpret/f64": sign([f64], [i64]),
"f32.reinterpret/i32": sign([i32], [f32]),
"f64.reinterpret/i64": sign([i64], [f64])
};
var signatures = Object.assign({}, controlInstructions, parametricInstructions, variableInstructions, memoryInstructions, numericInstructions);
exports.signatures = signatures;


@@ -1,83 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.transform = transform;
var t = require("../../index"); // func and call_indirect instructions can either define a signature inline, or
// reference a signature, e.g.
//
// ;; inline signature
// (func (result i64)
// (i64.const 2)
// )
// ;; signature reference
// (type (func (result i64)))
// (func (type 0)
// (i64.const 2))
// )
//
// this AST transform denormalises the type references, making all signatures within the module
// inline.
function transform(ast) {
var typeInstructions = [];
t.traverse(ast, {
TypeInstruction: function TypeInstruction(_ref) {
var node = _ref.node;
typeInstructions.push(node);
}
});
if (!typeInstructions.length) {
return;
}
function denormalizeSignature(signature) {
// signature referenced by identifier
if (signature.type === "Identifier") {
var identifier = signature;
var typeInstruction = typeInstructions.find(function (t) {
return t.id.type === identifier.type && t.id.value === identifier.value;
});
if (!typeInstruction) {
throw new Error("A type instruction reference was not found ".concat(JSON.stringify(signature)));
}
return typeInstruction.functype;
} // signature referenced by index
if (signature.type === "NumberLiteral") {
var signatureRef = signature;
var _typeInstruction = typeInstructions[signatureRef.value];
return _typeInstruction.functype;
}
return signature;
}
t.traverse(ast, {
Func: function (_Func) {
function Func(_x) {
return _Func.apply(this, arguments);
}
Func.toString = function () {
return _Func.toString();
};
return Func;
}(function (_ref2) {
var node = _ref2.node;
node.signature = denormalizeSignature(node.signature);
}),
CallIndirectInstruction: function CallIndirectInstruction(_ref3) {
var node = _ref3.node;
node.signature = denormalizeSignature(node.signature);
}
});
}


@@ -1,225 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.transform = transform;
var _index = require("../../index");
var _helperModuleContext = require("@webassemblyjs/helper-module-context");
function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
function _sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
function _slicedToArray(arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return _sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }
// FIXME(sven): do the same with all block instructions, must be more generic here
function newUnexpectedFunction(i) {
return new Error("unknown function at offset: " + i);
}
function transform(ast) {
var module;
(0, _index.traverse)(ast, {
Module: function (_Module) {
function Module(_x) {
return _Module.apply(this, arguments);
}
Module.toString = function () {
return _Module.toString();
};
return Module;
}(function (path) {
module = path.node;
})
});
var moduleContext = (0, _helperModuleContext.moduleContextFromModuleAST)(module); // Transform the actual instruction in function bodies
(0, _index.traverse)(ast, {
Func: function (_Func) {
function Func(_x2) {
return _Func.apply(this, arguments);
}
Func.toString = function () {
return _Func.toString();
};
return Func;
}(function (path) {
transformFuncPath(path, moduleContext);
}),
Start: function (_Start) {
function Start(_x3) {
return _Start.apply(this, arguments);
}
Start.toString = function () {
return _Start.toString();
};
return Start;
}(function (path) {
var index = path.node.index;
if ((0, _index.isIdentifier)(index) === true) {
var offsetInModule = moduleContext.getFunctionOffsetByIdentifier(index.value);
if (typeof offsetInModule === "undefined") {
throw newUnexpectedFunction(index.value);
} // Replace the index Identifier
// $FlowIgnore: reference?
path.node.index = (0, _index.numberLiteralFromRaw)(offsetInModule);
}
})
});
}
function transformFuncPath(funcPath, moduleContext) {
var funcNode = funcPath.node;
var signature = funcNode.signature;
if (signature.type !== "Signature") {
throw new Error("Function signatures must be denormalised before execution");
}
var params = signature.params; // Add func locals in the context
params.forEach(function (p) {
return moduleContext.addLocal(p.valtype);
});
(0, _index.traverse)(funcNode, {
Instr: function (_Instr) {
function Instr(_x4) {
return _Instr.apply(this, arguments);
}
Instr.toString = function () {
return _Instr.toString();
};
return Instr;
}(function (instrPath) {
var instrNode = instrPath.node;
/**
* Local access
*/
if (instrNode.id === "get_local" || instrNode.id === "set_local" || instrNode.id === "tee_local") {
var _instrNode$args = _slicedToArray(instrNode.args, 1),
firstArg = _instrNode$args[0];
if (firstArg.type === "Identifier") {
var offsetInParams = params.findIndex(function (_ref) {
var id = _ref.id;
return id === firstArg.value;
});
if (offsetInParams === -1) {
throw new Error("".concat(firstArg.value, " not found in ").concat(instrNode.id, ": not declared in func params"));
} // Replace the Identifier node by our new NumberLiteral node
instrNode.args[0] = (0, _index.numberLiteralFromRaw)(offsetInParams);
}
}
/**
* Global access
*/
if (instrNode.id === "get_global" || instrNode.id === "set_global") {
var _instrNode$args2 = _slicedToArray(instrNode.args, 1),
_firstArg = _instrNode$args2[0];
if ((0, _index.isIdentifier)(_firstArg) === true) {
var globalOffset = moduleContext.getGlobalOffsetByIdentifier( // $FlowIgnore: reference?
_firstArg.value);
if (typeof globalOffset === "undefined") {
// $FlowIgnore: reference?
throw new Error("global ".concat(_firstArg.value, " not found in module"));
} // Replace the Identifier node by our new NumberLiteral node
instrNode.args[0] = (0, _index.numberLiteralFromRaw)(globalOffset);
}
}
/**
* Labels lookup
*/
if (instrNode.id === "br") {
var _instrNode$args3 = _slicedToArray(instrNode.args, 1),
_firstArg2 = _instrNode$args3[0];
if ((0, _index.isIdentifier)(_firstArg2) === true) {
// if the label is not found it is going to be replaced with -1
// which is invalid.
var relativeBlockCount = -1; // $FlowIgnore: reference?
instrPath.findParent(function (_ref2) {
var node = _ref2.node;
if ((0, _index.isBlock)(node)) {
relativeBlockCount++; // $FlowIgnore: reference?
var name = node.label || node.name;
if (_typeof(name) === "object") {
// $FlowIgnore: isIdentifier ensures that
if (name.value === _firstArg2.value) {
// Found it
return false;
}
}
}
if ((0, _index.isFunc)(node)) {
return false;
}
}); // Replace the Identifier node by our new NumberLiteral node
instrNode.args[0] = (0, _index.numberLiteralFromRaw)(relativeBlockCount);
}
}
}),
/**
* Func lookup
*/
CallInstruction: function (_CallInstruction) {
function CallInstruction(_x5) {
return _CallInstruction.apply(this, arguments);
}
CallInstruction.toString = function () {
return _CallInstruction.toString();
};
return CallInstruction;
}(function (_ref3) {
var node = _ref3.node;
var index = node.index;
if ((0, _index.isIdentifier)(index) === true) {
var offsetInModule = moduleContext.getFunctionOffsetByIdentifier(index.value);
if (typeof offsetInModule === "undefined") {
throw newUnexpectedFunction(index.value);
} // Replace the index Identifier
// $FlowIgnore: reference?
node.index = (0, _index.numberLiteralFromRaw)(offsetInModule);
}
})
});
}


@@ -1,105 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.traverse = traverse;
var _nodePath = require("./node-path");
var _nodes = require("./nodes");
// recursively walks the AST starting at the given node. The callback is invoked for
// any object that has a 'type' property.
function walk(context, callback) {
var stop = false;
function innerWalk(context, callback) {
if (stop) {
return;
}
var node = context.node;
if (node === undefined) {
console.warn("traversing with an empty context");
return;
}
if (node._deleted === true) {
return;
}
var path = (0, _nodePath.createPath)(context);
callback(node.type, path);
if (path.shouldStop) {
stop = true;
return;
}
Object.keys(node).forEach(function (prop) {
var value = node[prop];
if (value === null || value === undefined) {
return;
}
var valueAsArray = Array.isArray(value) ? value : [value];
valueAsArray.forEach(function (childNode) {
if (typeof childNode.type === "string") {
var childContext = {
node: childNode,
parentKey: prop,
parentPath: path,
shouldStop: false,
inList: Array.isArray(value)
};
innerWalk(childContext, callback);
}
});
});
}
innerWalk(context, callback);
}
var noop = function noop() {};
function traverse(node, visitors) {
var before = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : noop;
var after = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : noop;
Object.keys(visitors).forEach(function (visitor) {
if (!_nodes.nodeAndUnionTypes.includes(visitor)) {
throw new Error("Unexpected visitor ".concat(visitor));
}
});
var context = {
node: node,
inList: false,
shouldStop: false,
parentPath: null,
parentKey: null
};
walk(context, function (type, path) {
if (typeof visitors[type] === "function") {
before(type, path);
visitors[type](path);
after(type, path);
}
var unionTypes = _nodes.unionTypesMap[type];
if (!unionTypes) {
throw new Error("Unexpected node type ".concat(type));
}
unionTypes.forEach(function (unionType) {
if (typeof visitors[unionType] === "function") {
before(unionType, path);
visitors[unionType](path);
after(unionType, path);
}
});
});
}


@@ -1,306 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.isAnonymous = isAnonymous;
exports.getSectionMetadata = getSectionMetadata;
exports.getSectionMetadatas = getSectionMetadatas;
exports.sortSectionMetadata = sortSectionMetadata;
exports.orderedInsertNode = orderedInsertNode;
exports.assertHasLoc = assertHasLoc;
exports.getEndOfSection = getEndOfSection;
exports.shiftLoc = shiftLoc;
exports.shiftSection = shiftSection;
exports.signatureForOpcode = signatureForOpcode;
exports.getUniqueNameGenerator = getUniqueNameGenerator;
exports.getStartByteOffset = getStartByteOffset;
exports.getEndByteOffset = getEndByteOffset;
exports.getFunctionBeginingByteOffset = getFunctionBeginingByteOffset;
exports.getEndBlockByteOffset = getEndBlockByteOffset;
exports.getStartBlockByteOffset = getStartBlockByteOffset;
var _signatures = require("./signatures");
var _traverse = require("./traverse");
var _helperWasmBytecode = _interopRequireWildcard(require("@webassemblyjs/helper-wasm-bytecode"));
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = Object.defineProperty && Object.getOwnPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : {}; if (desc.get || desc.set) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } } newObj.default = obj; return newObj; } }
function _sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
function _slicedToArray(arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return _sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }
function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
function isAnonymous(ident) {
return ident.raw === "";
}
function getSectionMetadata(ast, name) {
var section;
(0, _traverse.traverse)(ast, {
SectionMetadata: function (_SectionMetadata) {
function SectionMetadata(_x) {
return _SectionMetadata.apply(this, arguments);
}
SectionMetadata.toString = function () {
return _SectionMetadata.toString();
};
return SectionMetadata;
}(function (_ref) {
var node = _ref.node;
if (node.section === name) {
section = node;
}
})
});
return section;
}
function getSectionMetadatas(ast, name) {
var sections = [];
(0, _traverse.traverse)(ast, {
SectionMetadata: function (_SectionMetadata2) {
function SectionMetadata(_x2) {
return _SectionMetadata2.apply(this, arguments);
}
SectionMetadata.toString = function () {
return _SectionMetadata2.toString();
};
return SectionMetadata;
}(function (_ref2) {
var node = _ref2.node;
if (node.section === name) {
sections.push(node);
}
})
});
return sections;
}
function sortSectionMetadata(m) {
if (m.metadata == null) {
console.warn("sortSectionMetadata: no metadata to sort");
return;
} // $FlowIgnore
m.metadata.sections.sort(function (a, b) {
var aId = _helperWasmBytecode.default.sections[a.section];
var bId = _helperWasmBytecode.default.sections[b.section];
if (typeof aId !== "number" || typeof bId !== "number") {
throw new Error("Section id not found");
}
return aId - bId;
});
}
function orderedInsertNode(m, n) {
assertHasLoc(n);
var didInsert = false;
if (n.type === "ModuleExport") {
m.fields.push(n);
return;
}
m.fields = m.fields.reduce(function (acc, field) {
var fieldEndCol = Infinity;
if (field.loc != null) {
// $FlowIgnore
fieldEndCol = field.loc.end.column;
} // $FlowIgnore: assertHasLoc ensures that
if (didInsert === false && n.loc.start.column < fieldEndCol) {
didInsert = true;
acc.push(n);
}
acc.push(field);
return acc;
}, []); // Handles empty modules and the case where n is the last element
if (didInsert === false) {
m.fields.push(n);
}
}
function assertHasLoc(n) {
if (n.loc == null || n.loc.start == null || n.loc.end == null) {
throw new Error("Internal failure: node (".concat(JSON.stringify(n.type), ") has no location information"));
}
}
function getEndOfSection(s) {
assertHasLoc(s.size);
return s.startOffset + s.size.value + ( // $FlowIgnore
s.size.loc.end.column - s.size.loc.start.column);
}
function shiftLoc(node, delta) {
// $FlowIgnore
node.loc.start.column += delta; // $FlowIgnore
node.loc.end.column += delta;
}
function shiftSection(ast, node, delta) {
if (node.type !== "SectionMetadata") {
throw new Error("Can not shift node " + JSON.stringify(node.type));
}
node.startOffset += delta;
if (_typeof(node.size.loc) === "object") {
shiftLoc(node.size, delta);
} // Custom sections don't have a vectorOfSize
if (_typeof(node.vectorOfSize) === "object" && _typeof(node.vectorOfSize.loc) === "object") {
shiftLoc(node.vectorOfSize, delta);
}
var sectionName = node.section; // shift node locations within that section
(0, _traverse.traverse)(ast, {
Node: function Node(_ref3) {
var node = _ref3.node;
var section = (0, _helperWasmBytecode.getSectionForNode)(node);
if (section === sectionName && _typeof(node.loc) === "object") {
shiftLoc(node, delta);
}
}
});
}
function signatureForOpcode(object, name) {
var opcodeName = name;
if (object !== undefined && object !== "") {
opcodeName = object + "." + name;
}
var sign = _signatures.signatures[opcodeName];
if (sign == undefined) {
// TODO: Uncomment this when br_table and others have been done
//throw new Error("Invalid opcode: "+opcodeName);
return [object, object];
}
return sign[0];
}
function getUniqueNameGenerator() {
var inc = {};
return function () {
var prefix = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : "temp";
if (!(prefix in inc)) {
inc[prefix] = 0;
} else {
inc[prefix] = inc[prefix] + 1;
}
return prefix + "_" + inc[prefix];
};
}
function getStartByteOffset(n) {
// $FlowIgnore
if (typeof n.loc === "undefined" || typeof n.loc.start === "undefined") {
throw new Error( // $FlowIgnore
"Can not get byte offset without loc informations, node: " + String(n.id));
}
return n.loc.start.column;
}
function getEndByteOffset(n) {
// $FlowIgnore
if (typeof n.loc === "undefined" || typeof n.loc.end === "undefined") {
throw new Error("Can not get byte offset without loc informations, node: " + n.type);
}
return n.loc.end.column;
}
function getFunctionBeginingByteOffset(n) {
if (!(n.body.length > 0)) {
throw new Error('n.body.length > 0' + " error: " + (undefined || "unknown"));
}
var _n$body = _slicedToArray(n.body, 1),
firstInstruction = _n$body[0];
return getStartByteOffset(firstInstruction);
}
function getEndBlockByteOffset(n) {
// $FlowIgnore
if (!(n.instr.length > 0 || n.body.length > 0)) {
throw new Error('n.instr.length > 0 || n.body.length > 0' + " error: " + (undefined || "unknown"));
}
var lastInstruction;
if (n.instr) {
// $FlowIgnore
lastInstruction = n.instr[n.instr.length - 1];
}
if (n.body) {
// $FlowIgnore
lastInstruction = n.body[n.body.length - 1];
}
if (!(_typeof(lastInstruction) === "object")) {
throw new Error('typeof lastInstruction === "object"' + " error: " + (undefined || "unknown"));
}
// $FlowIgnore
return getStartByteOffset(lastInstruction);
}
function getStartBlockByteOffset(n) {
// $FlowIgnore
if (!(n.instr.length > 0 || n.body.length > 0)) {
throw new Error('n.instr.length > 0 || n.body.length > 0' + " error: " + (undefined || "unknown"));
}
var firstInstruction;
if (n.instr) {
// $FlowIgnore
var _n$instr = _slicedToArray(n.instr, 1);
firstInstruction = _n$instr[0];
}
if (n.body) {
// $FlowIgnore
var _n$body2 = _slicedToArray(n.body, 1);
firstInstruction = _n$body2[0];
}
if (!(_typeof(firstInstruction) === "object")) {
throw new Error('typeof firstInstruction === "object"' + " error: " + (undefined || "unknown"));
}
// $FlowIgnore
return getStartByteOffset(firstInstruction);
}
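Of the helpers above, getUniqueNameGenerator is easy to illustrate in isolation. A minimal sketch of its behaviour as implemented above, assuming the function is in scope (the file name is not shown in this diff, so no import path is given):
// Each prefix gets its own counter, starting at 0.
const generateName = getUniqueNameGenerator();

console.log(generateName("func")); // "func_0"
console.log(generateName("func")); // "func_1"
console.log(generateName());       // "temp_0" (default prefix is "temp")
console.log(generateName("func")); // "func_2"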

View File

@@ -1,33 +0,0 @@
{
"name": "@webassemblyjs/ast",
"version": "1.9.0",
"description": "AST utils for webassemblyjs",
"keywords": [
"webassembly",
"javascript",
"ast"
],
"main": "lib/index.js",
"module": "esm/index.js",
"author": "Sven Sauleau",
"license": "MIT",
"dependencies": {
"@webassemblyjs/helper-module-context": "1.9.0",
"@webassemblyjs/helper-wasm-bytecode": "1.9.0",
"@webassemblyjs/wast-parser": "1.9.0"
},
"repository": {
"type": "git",
"url": "https://github.com/xtuc/webassemblyjs.git"
},
"publishConfig": {
"access": "public"
},
"devDependencies": {
"@webassemblyjs/helper-test-framework": "1.9.0",
"array.prototype.flatmap": "^1.2.1",
"dump-exports": "^0.1.0",
"mamacro": "^0.0.7"
},
"gitHead": "0440b420888c1f7701eb9762ec657775506b87d8"
}

View File

@@ -1,219 +0,0 @@
const definitions = require("../src/definitions");
const flatMap = require("array.prototype.flatmap");
const {
typeSignature,
iterateProps,
mapProps,
filterProps,
unique
} = require("./util");
const stdout = process.stdout;
const jsTypes = ["string", "number", "boolean"];
const quote = value => `"${value}"`;
function params(fields) {
const optionalDefault = field => (field.default ? ` = ${field.default}` : "");
return mapProps(fields)
.map(field => `${typeSignature(field)}${optionalDefault(field)}`)
.join(",");
}
function assertParamType({ assertNodeType, array, name, type }) {
if (array) {
// TODO - assert contents of array?
return `assert(typeof ${name} === "object" && typeof ${name}.length !== "undefined")\n`;
} else {
if (jsTypes.includes(type)) {
return `assert(
typeof ${name} === "${type}",
"Argument ${name} must be of type ${type}, given: " + typeof ${name}
)`;
}
if (assertNodeType === true) {
return `assert(
${name}.type === "${type}",
"Argument ${name} must be of type ${type}, given: " + ${name}.type
)`;
}
return "";
}
}
function assertParam(meta) {
const paramAssertion = assertParamType(meta);
if (paramAssertion === "") {
return "";
}
if (meta.maybe || meta.optional) {
return `
if (${meta.name} !== null && ${meta.name} !== undefined) {
${paramAssertion};
}
`;
} else {
return paramAssertion;
}
}
function assertParams(fields) {
return mapProps(fields)
.map(assertParam)
.join("\n");
}
function buildObject(typeDef) {
const optionalField = meta => {
if (meta.array) {
// omit optional array properties if the constructor function was supplied
// with an empty array
return `
if (typeof ${meta.name} !== "undefined" && ${meta.name}.length > 0) {
node.${meta.name} = ${meta.name};
}
`;
} else if (meta.type === "Object") {
// omit optional object properties if they have no keys
return `
if (typeof ${meta.name} !== "undefined" && Object.keys(${
meta.name
}).length !== 0) {
node.${meta.name} = ${meta.name};
}
`;
} else if (meta.type === "boolean") {
// omit optional boolean properties if they are not true
return `
if (${meta.name} === true) {
node.${meta.name} = true;
}
`;
} else {
return `
if (typeof ${meta.name} !== "undefined") {
node.${meta.name} = ${meta.name};
}
`;
}
};
const fields = mapProps(typeDef.fields)
.filter(f => !f.optional && !f.constant)
.map(f => f.name);
const constants = mapProps(typeDef.fields)
.filter(f => f.constant)
.map(f => `${f.name}: "${f.value}"`);
return `
const node: ${typeDef.flowTypeName || typeDef.name} = {
type: "${typeDef.name}",
${constants.concat(fields).join(",")}
}
${mapProps(typeDef.fields)
.filter(f => f.optional)
.map(optionalField)
.join("")}
`;
}
function lowerCamelCase(name) {
return name.substring(0, 1).toLowerCase() + name.substring(1);
}
function generate() {
stdout.write(`
// @flow
// THIS FILE IS AUTOGENERATED
// see scripts/generateNodeUtils.js
import { assert } from "mamacro";
function isTypeOf(t: string) {
return (n: Node) => n.type === t;
}
function assertTypeOf(t: string) {
return (n: Node) => assert(n.type === t);
}
`);
// Node builders
iterateProps(definitions, typeDefinition => {
stdout.write(`
export function ${lowerCamelCase(typeDefinition.name)} (
${params(filterProps(typeDefinition.fields, f => !f.constant))}
): ${typeDefinition.name} {
${assertParams(filterProps(typeDefinition.fields, f => !f.constant))}
${buildObject(typeDefinition)}
return node;
}
`);
});
// Node testers
iterateProps(definitions, typeDefinition => {
stdout.write(`
export const is${typeDefinition.name} =
isTypeOf("${typeDefinition.name}");
`);
});
// Node union type testers
const unionTypes = unique(
flatMap(mapProps(definitions).filter(d => d.unionType), d => d.unionType)
);
unionTypes.forEach(unionType => {
stdout.write(
`
export const is${unionType} = (node: Node) => ` +
mapProps(definitions)
.filter(d => d.unionType && d.unionType.includes(unionType))
.map(d => `is${d.name}(node) `)
.join("||") +
";\n\n"
);
});
// Node assertion
iterateProps(definitions, typeDefinition => {
stdout.write(`
export const assert${typeDefinition.name} =
assertTypeOf("${typeDefinition.name}");
`);
});
// a map from node type to its set of union types
stdout.write(
`
export const unionTypesMap = {` +
mapProps(definitions)
.filter(d => d.unionType)
.map(t => `"${t.name}": [${t.unionType.map(quote).join(",")}]\n`) +
`};
`
);
// an array of all node and union types
stdout.write(
`
export const nodeAndUnionTypes = [` +
mapProps(definitions)
.map(t => `"${t.name}"`)
.concat(unionTypes.map(quote))
.join(",") +
`];`
);
}
generate();
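To make the code generation above concrete: given a hypothetical entry in src/definitions (not shown in this diff), and assuming typeSignature renders a field as `name: type`, generate() would emit a builder, a tester, and an assertion roughly like the following. This is illustrative only, not the script's exact output.
// Hypothetical definition:
//   StringLiteral: { fields: { value: { type: "string" } } }
// Approximate generated output:
export function stringLiteral(value: string): StringLiteral {
  assert(
    typeof value === "string",
    "Argument value must be of type string, given: " + typeof value
  );

  const node: StringLiteral = {
    type: "StringLiteral",
    value
  };

  return node;
}

export const isStringLiteral = isTypeOf("StringLiteral");
export const assertStringLiteral = assertTypeOf("StringLiteral");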

Some files were not shown because too many files have changed in this diff.