Mirror of https://github.com/github/codeql-action.git (synced 2025-12-15 20:09:17 +08:00)

Compare commits: always-rep...examples (55 commits)
Commit SHA1s:

01974b0f5e, 5e3a4c1d90, 6a82723ea1, 6507fba7ec, aa54af7018, 74c9991849, f49335fc3b, d7b9f5a097,
572c8bbc0c, 0347b72305, 27cc8b23fe, 584df475ca, cd95d34497, 88c1b7fb89, 51b42fcf78, 015ead73d9,
c351304778, 96901ac7d8, cc471c2014, c88fb695ab, ec4d38a9a5, 15bd158ded, 256c63a715, a76042ab4a,
1477a43cc8, f17ebc80bd, c0d9de18c0, 52cd1f2261, 3455736978, f668f5fc74, 3aa3d6a2b6, 538cbdd614,
49575f87c4, 5a800ccbfa, cc2c18d6a8, 4c11b3d9bf, a511aca9f1, c3847056c5, 189a899282, c5ecb82753,
4dc964d906, dc27ff90bd, cf266cbf27, 1f29db50bb, c979850d28, baa9c9e0df, d966ea2f52, 6bab450a9a,
583f8a923c, ab918b676b, 290b34d5df, dcd81b5847, d90fca396a, 546d5a8843, 43de3a9949

4  .github/codeql/codeql-config.yml  (vendored)

@@ -1,4 +1,6 @@
 name: "CodeQL config"
 queries:
   - name: Run custom queries
     uses: ./queries
+paths-ignore:
+  - tests
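
For reference, a sketch of the config this change produces, extended with the companion `paths` key that the `Config` class elsewhere in this diff (see `lib/analysis-paths.test.js` below) also exercises. The `src` entry and the exact indentation are assumptions for illustration, not part of the commit:

```yaml
# Sketch of the updated config; the `paths` block and its `src` entry are illustrative only.
name: "CodeQL config"

queries:
  - name: Run custom queries
    uses: ./queries

paths:
  - src          # restrict analysis to these directories (assumed example)

paths-ignore:
  - tests        # excluded by this change
```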

3  .github/workflows/codeql.yml  (vendored)

@@ -13,5 +13,6 @@ jobs:
     - uses: actions/checkout@v1
     - uses: ./init
       with:
-        config-file: ./.github/codeql/codeql-config.yml
+        languages: javascript
+        config-file: ./.github/codeql/codeql-config.yml
     - uses: ./analyze

130  .github/workflows/integration-testing.yml  (vendored)

@@ -3,20 +3,118 @@ name: "Integration Testing"
 on: [push]

 jobs:
-  dispatch-events:
-    if: github.event.repository.full_name == 'github/codeql-action'
-    runs-on: ubuntu-latest
-    steps:
-    - name: Send repository dispatch events
-      run: |
-        curl -X POST \
-        -H "Authorization: Bearer ${{ secrets.CODEQL_TESTING_TOKEN }}" \
-        -H "Accept: application/vnd.github.everest-preview+json" \
-        https://api.github.com/repos/Anthophila/amazon-cognito-js-copy/dispatches \
-        -d '{"event_type":"codeql-integration","client_payload": {"sha": "${{ github.sha }}"}}'
-
-        curl -X POST \
-        -H "Authorization: Bearer ${{ secrets.CODEQL_TESTING_TOKEN }}" \
-        -H "Accept: application/vnd.github.everest-preview+json" \
-        https://api.github.com/repos/Anthophila/electron-test-action/dispatches \
-        -d '{"event_type":"codeql-integration","client_payload": {"sha": "${{ github.sha }}"}}'
+  multi-language-repo_test-autodetect-languages:
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-latest, windows-latest]
+    runs-on: ${{ matrix.os }}
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        shopt -s dotglob
+        mv * ../action/
+        mv ../action/tests/multi-language-repo/* .
+    - uses: ./../action/init
+    - name: Build code
+      shell: bash
+      run: ./build.sh
+    - uses: ./../action/analyze
+      env:
+        TEST_MODE: true
+
+  multi-language-repo_test-custom-queries:
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-latest, windows-latest, macos-latest]
+    runs-on: ${{ matrix.os }}
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        shopt -s dotglob
+        mv * ../action/
+        mv ../action/tests/multi-language-repo/* .
+    - uses: ./../action/init
+      with:
+        languages: cpp,csharp,java,javascript,python
+        config-file: ./.github/codeql/custom-queries.yml
+    - name: Build code
+      shell: bash
+      run: ./build.sh
+    - uses: ./../action/analyze
+      env:
+        TEST_MODE: true
+
+  # Currently is not possible to analyze Go in conjunction with other languages in macos
+  multi-language-repo_test-go-custom-queries:
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-latest, windows-latest, macos-latest]
+    runs-on: ${{ matrix.os }}
+
+    steps:
+    - uses: actions/setup-go@v2
+      if: ${{ matrix.os == 'macos-latest' }}
+      with:
+        go-version: '^1.13.1'
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        shopt -s dotglob
+        mv * ../action/
+        mv ../action/tests/multi-language-repo/* .
+    - uses: ./../action/init
+      with:
+        languages: go
+        config-file: ./.github/codeql/custom-queries.yml
+    - name: Build code
+      shell: bash
+      run: ./build.sh
+    - uses: ./../action/analyze
+      env:
+        TEST_MODE: true
+
+
+  multi-language-repo_rubocop:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Move codeql-action
+      shell: bash
+      run: |
+        mkdir ../action
+        shopt -s dotglob
+        mv * ../action/
+        mv ../action/tests/multi-language-repo/* .
+    - name: Set up Ruby
+      uses: ruby/setup-ruby@v1
+      with:
+        ruby-version: 2.6
+    - name: Install Code Scanning integration
+      run: bundle add code-scanning-rubocop --version 0.2.0 --skip-install
+    - name: Install dependencies
+      run: bundle install
+    - name: Rubocop run
+      run: |
+        bash -c "
+          bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
+          [[ $? -ne 2 ]]
+        "
+    - uses: ./../action/upload-sarif
+      with:
+        sarif_file: rubocop.sarif
+      env:
+        TEST_MODE: true
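
The repeated "Move codeql-action" step is what lets every job exercise the action from its own checkout: the action's sources are moved out to `../action`, a sample project from `tests/multi-language-repo` is moved into the workspace, and the local copy is then referenced as `./../action/init`. Reduced to its essentials (the job name here is illustrative, the step contents are taken from the diff):

```yaml
# Minimal sketch of the self-test pattern used by each job above.
jobs:
  self-test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2          # checkout of the codeql-action repo itself
      - name: Move codeql-action
        shell: bash
        run: |
          mkdir ../action
          shopt -s dotglob                 # make * match dotfiles such as .github
          mv * ../action/                  # the action now lives outside the workspace
          mv ../action/tests/multi-language-repo/* .   # the workspace becomes the test project
      - uses: ./../action/init             # run the local copy of the action against the test project
      - run: ./build.sh
      - uses: ./../action/analyze
        env:
          TEST_MODE: true                  # analysis runs, but the SARIF upload is skipped
```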

27  .github/workflows/js-uptodate-check.yml  (vendored)

@@ -1,27 +0,0 @@
-name: "Check generated JavaScript"
-
-on: [pull_request]
-
-jobs:
-  check-js:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v1
-    - name: Check generated JavaScript
-      run: |
-        # Sanity check that repo is clean to start with
-        if [ ! -z "$(git status --porcelain)" ]; then
-          # If we get a fail here then this workflow needs attention...
-          >&2 echo "Failed: Repo should be clean before testing!"
-          exit 1
-        fi
-        # Generate the JavaScript files
-        npm run-script build
-        # Check that repo is still clean
-        if [ ! -z "$(git status --porcelain)" ]; then
-          # If we get a fail here then the PR needs attention
-          >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
-          exit 1
-        fi
-        echo "Success: JavaScript files are up to date"

12  .github/workflows/npm-test.yml  (vendored)

@@ -1,12 +0,0 @@
-name: "npm run-script test"
-
-on: [push]
-
-jobs:
-  npm-test:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v1
-    - name: npm run-script test
-      run: npm run-script test

71  .github/workflows/pr-checks.yml  (vendored, Normal file)

@@ -0,0 +1,71 @@
+name: "PR checks"
+
+on: [push, pull_request]
+
+jobs:
+  tslint:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v1
+    - name: tslint
+      run: npm run-script lint
+
+  check-js:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v1
+    - name: Check generated JavaScript
+      run: |
+        # Sanity check that repo is clean to start with
+        if [ ! -z "$(git status --porcelain)" ]; then
+          # If we get a fail here then this workflow needs attention...
+          >&2 echo "Failed: Repo should be clean before testing!"
+          exit 1
+        fi
+        # Generate the JavaScript files
+        npm run-script build
+        # Check that repo is still clean
+        if [ ! -z "$(git status --porcelain)" ]; then
+          # If we get a fail here then the PR needs attention
+          >&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
+          git status
+          exit 1
+        fi
+        echo "Success: JavaScript files are up to date"
+
+  check-node-modules:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v1
+    - name: Check node modules up to date
+      run: |
+        # Sanity check that repo is clean to start with
+        if [ ! -z "$(git status --porcelain)" ]; then
+          # If we get a fail here then this workflow needs attention...
+          >&2 echo "Failed: Repo should be clean before testing!"
+          exit 1
+        fi
+
+        # Reinstall modules and then clean to remove absolute paths
+        # Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
+        npm ci
+        npm run removeNPMAbsolutePaths
+        # Check that repo is still clean
+        if [ ! -z "$(git status --porcelain)" ]; then
+          # If we get a fail here then the PR needs attention
+          >&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
+          git status
+          exit 1
+        fi
+        echo "Success: node_modules are up to date"
+
+  npm-test:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v1
+    - name: npm run-script test
+      run: npm run-script test

12  .github/workflows/ts-lint.yml  (vendored)

@@ -1,12 +0,0 @@
-name: "TSLint"
-
-on: [push]
-
-jobs:
-  tslint:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v1
-    - name: tslint
-      run: npm run-script lint

README.md

@@ -128,6 +128,8 @@ paths-ignore:
   - lib
 ```

+There are some ready to use configuration files in the examples folder that will add more queries to the analysis.
+
 ## Troubleshooting

 ### Trouble with Go dependencies
@@ -143,7 +145,7 @@ env:

 to `github/codeql-action/analyze`.

-### If you do not use a vendor directory
+#### If you do not use a vendor directory

 Dependencies on public repositories should just work. If you have dependencies on private repositories, one option is to use `git config` and a [personal access token](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line) to authenticate when downloading dependencies. Add a section like

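
The snippet the README goes on to show lies outside this hunk's context. Purely as an illustration, the kind of workflow step it refers to usually looks like the following; the secret name and the exact `insteadOf` URL rewrite are assumptions, not taken from this diff:

```yaml
# Hedged sketch: authenticate private dependency downloads with a personal access token.
# PRIVATE_REPO_TOKEN is an assumed secret name; adjust the URL pattern to your organisation.
- name: Configure git for private dependencies
  run: |
    git config --global url."https://x-access-token:${{ secrets.PRIVATE_REPO_TOKEN }}@github.com/".insteadOf "https://github.com/"
```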
@@ -168,3 +170,7 @@ dotnet build /p:UseSharedCompilation=false
 ```

 Version 3 does not require the additional flag.
+
+### Analysing Go together with other languages on `macos-latest`
+
+When running on macos it is currently not possible to analyze Go in conjunction with any of Java, C/C++, or C#. Each language can still be analyzed separately.
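
In practice this means giving Go its own job when the matrix includes macOS, which is how the integration-testing workflow above separates `multi-language-repo_test-custom-queries` from `multi-language-repo_test-go-custom-queries`. A trimmed sketch, with job names, action references and the build command chosen only for illustration:

```yaml
# Sketch: analyse Go in a separate job on macOS instead of combining it with other languages.
jobs:
  analyze-others:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v2
      - uses: github/codeql-action/init@v1     # assumed reference; pin whatever you normally use
        with:
          languages: cpp,csharp,java,javascript
      - run: ./build.sh                        # illustrative build step
      - uses: github/codeql-action/analyze@v1

  analyze-go:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v2
      - uses: github/codeql-action/init@v1
        with:
          languages: go
      - run: ./build.sh
      - uses: github/codeql-action/analyze@v1
```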

4  examples/extended-cpp-queryset.yml  (Normal file)

@@ -0,0 +1,4 @@
+name: extended-cpp-queryset
+queries:
+  - name: Additional C++ queries
+    uses: github/codeql/cpp/ql/src/codeql-suites/cpp-lgtm.qls@lgtm.com

4  examples/extended-csharp-queryset.yml  (Normal file)

@@ -0,0 +1,4 @@
+name: extended-csharp-queryset
+queries:
+  - name: Additional C# queries
+    uses: github/codeql/csharp/ql/src/codeql-suites/csharp-lgtm.qls@lgtm.com

4  examples/extended-go-queryset.yml  (Normal file)

@@ -0,0 +1,4 @@
+name: extended-go-queryset
+queries:
+  - name: Additional Go queries
+    uses: github/codeql-go/ql/src/codeql-suites/go-lgtm.qls@lgtm.com

4  examples/extended-java-queryset.yml  (Normal file)

@@ -0,0 +1,4 @@
+name: extended-java-queryset
+queries:
+  - name: Additional Java queries
+    uses: github/codeql/java/ql/src/codeql-suites/java-lgtm.qls@lgtm.com

4  examples/extended-javascript-queryset.yml  (Normal file)

@@ -0,0 +1,4 @@
+name: extended-javascript-queryset
+queries:
+  - name: Additional Javascript queries
+    uses: github/codeql/javascript/ql/src/codeql-suites/javascript-lgtm.qls@lgtm.com

4  examples/extended-python-queryset.yml  (Normal file)

@@ -0,0 +1,4 @@
+name: extended-python-queryset
+queries:
+  - name: Additional Python queries
+    uses: github/codeql/python/ql/src/codeql-suites/python-lgtm.qls@lgtm.com
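
These example querysets plug into the `config-file` input of the init step, the same input the workflows above point at `./.github/codeql/custom-queries.yml`. A hedged usage sketch, assuming the example file has first been copied into the repository being analysed:

```yaml
# Sketch: reference one of the example querysets from the init step.
# Assumes examples/extended-javascript-queryset.yml was copied to .github/codeql/ first.
- uses: ./init
  with:
    languages: javascript
    config-file: ./.github/codeql/extended-javascript-queryset.yml
```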

1  lib/analysis-paths.js  (generated)

@@ -25,3 +25,4 @@ function includeAndExcludeAnalysisPaths(config, languages) {
     }
 }
 exports.includeAndExcludeAnalysisPaths = includeAndExcludeAnalysisPaths;
+//# sourceMappingURL=analysis-paths.js.map

1  lib/analysis-paths.js.map  (Normal file)

@@ -0,0 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,SAAgB,8BAA8B,CAAC,MAA0B,EAAE,SAAmB;IAC1F,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC3B,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KACtE;IAED,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACjC,IAAI,CAAC,cAAc,CAAC,oBAAoB,EAAE,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;KAC5E;IAED,SAAS,qBAAqB,CAAC,QAAQ;QACnC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;IAC9D,CAAC;IAED,2DAA2D;IAC3D,+DAA+D;IAC/D,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAAE;QAC3G,IAAI,CAAC,OAAO,CAAC,4FAA4F,CAAC,CAAC;KAC9G;AACL,CAAC;AAlBD,wEAkBC"}

30  lib/analysis-paths.test.js  (generated, Normal file)

@@ -0,0 +1,30 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+    result["default"] = mod;
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const ava_1 = __importDefault(require("ava"));
+const analysisPaths = __importStar(require("./analysis-paths"));
+const configUtils = __importStar(require("./config-utils"));
+ava_1.default("emptyPaths", async (t) => {
+    let config = new configUtils.Config();
+    analysisPaths.includeAndExcludeAnalysisPaths(config, []);
+    t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
+    t.is(process.env['LGTM_INDEX_EXCLUDE'], undefined);
+});
+ava_1.default("nonEmptyPaths", async (t) => {
+    let config = new configUtils.Config();
+    config.paths.push('path1', 'path2');
+    config.pathsIgnore.push('path3', 'path4');
+    analysisPaths.includeAndExcludeAnalysisPaths(config, []);
+    t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
+    t.is(process.env['LGTM_INDEX_EXCLUDE'], 'path3\npath4');
+});
+//# sourceMappingURL=analysis-paths.test.js.map

1  lib/analysis-paths.test.js.map  (Normal file)

@@ -0,0 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,4DAA8C;AAE9C,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACzB,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC5B,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IACpC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC1C,aAAa,CAAC,8BAA8B,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IACzD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;AAC5D,CAAC,CAAC,CAAC"}

1  lib/autobuild.js  (generated)

@@ -58,3 +58,4 @@ run().catch(e => {
     core.setFailed("autobuild action failed. " + e);
     console.log(e);
 });
+//# sourceMappingURL=autobuild.js.map

1  lib/autobuild.js.map  (Normal file)

@@ -0,0 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,2CAA6B;AAE7B,gEAAkD;AAClD,6CAA+B;AAE/B,KAAK,UAAU,GAAG;;IAChB,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,EAAE;YACzF,OAAO;SACR;QAED,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,OAAA,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,8BAA8B,CAAC,0CAAE,KAAK,CAAC,GAAG,MAAK,EAAE,CAAC;QACnG,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,8DAA8D;QAC9D,MAAM,SAAS,GAAG,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,iBAAiB,CAAC,CAAC;QAExE,MAAM,OAAO,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,cAAc,CAAC;QAChF,MAAM,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;QAGpF,+DAA+D;QAC/D,0FAA0F;QAC1F,qDAAqD;QACrD,8EAA8E;QAC9E,gHAAgH;QAChH,IAAI,eAAe,GAAG,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,EAAE,CAAC;QAC7D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,wBAAwB,EAAE,+BAA+B,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAE1I,MAAM,IAAI,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QAC9B,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,MAAM,IAAI,CAAC,kBAAkB,CAAC,WAAW,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QACvE,OAAO;KACR;IAED,MAAM,IAAI,CAAC,qBAAqB,CAAC,WAAW,CAAC,CAAC;AAChD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

1  lib/config-utils.js  (generated)

@@ -133,3 +133,4 @@ async function loadConfig() {
     }
 }
 exports.loadConfig = loadConfig;
+//# sourceMappingURL=config-utils.js.map

1  lib/config-utils.js.map  (Normal file)

@@ -0,0 +1 @@
{"version":3,"file":"config-utils.js","sourceRoot":"","sources":["../src/config-utils.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,gDAAkC;AAClC,uCAAyB;AACzB,8CAAgC;AAChC,2CAA6B;AAE7B,MAAa,aAAa;IAKtB,YAAY,UAAkB,EAAE,GAAW;QAFpC,SAAI,GAAG,EAAE,CAAC;QAGb,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;IACnB,CAAC;CACJ;AATD,sCASC;AAED,MAAa,MAAM;IAAnB;QACW,SAAI,GAAG,EAAE,CAAC;QACV,0BAAqB,GAAG,KAAK,CAAC;QAC9B,sBAAiB,GAAa,EAAE,CAAC;QACjC,oBAAe,GAAoB,EAAE,CAAC;QACtC,gBAAW,GAAa,EAAE,CAAC;QAC3B,UAAK,GAAa,EAAE,CAAC;IAuChC,CAAC;IArCU,QAAQ,CAAC,SAAiB;QAC7B,qEAAqE;QACrE,kDAAkD;QAElD,IAAI,SAAS,KAAK,EAAE,EAAE;YAClB,MAAM,0CAA0C,CAAC;SACpD;QAED,IAAI,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;YAC5B,IAAI,CAAC,iBAAiB,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;YAChD,OAAO;SACV;QAED,IAAI,GAAG,GAAG,SAAS,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAC/B,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,EAAE;YAClB,MAAM,uEAAuE,GAAG,SAAS,CAAC;SAC7F;QAED,MAAM,GAAG,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC;QACnB,GAAG,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QACxB,+BAA+B;QAC/B,+BAA+B;QAC/B,yFAAyF;QACzF,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC,EAAE;YAChB,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;SAClD;QAED,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC,EAAE;YAChB,MAAM,uEAAuE,GAAG,SAAS,CAAC;SAC7F;QAED,IAAI,QAAQ,GAAG,IAAI,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;QAC7D,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,EAAE;YAClB,QAAQ,CAAC,IAAI,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC;SAC1B;QACD,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;IACxC,CAAC;CACJ;AA7CD,wBA6CC;AAED,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,IAAI,oBAAoB,CAAC;AAE7E,SAAS,UAAU;IACf,MAAM,UAAU,GAAG,IAAI,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;IAEhD,MAAM,MAAM,GAAG,IAAI,MAAM,EAAE,CAAC;IAE5B,qDAAqD;IACrD,IAAI,UAAU,KAAK,EAAE,EAAE;QACnB,IAAI,CAAC,KAAK,CAAC,oCAAoC,CAAC,CAAC;QACjD,OAAO,MAAM,CAAC;KACjB;IAED,IAAI;QACA,MAAM,UAAU,GAAG,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,YAAY,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC,CAAC;QAEtE,IAAI,UAAU,CAAC,IAAI,IAAI,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ,EAAE;YACxD,MAAM,CAAC,IAAI,GAAG,UAAU,CAAC,IAAI,CAAC;SACjC;QAED,IAAI,UAAU,CAAC,yBAAyB,CAAC,IAAI,OAAO,UAAU,CAAC,yBAAyB,CAAC,KAAK,SAAS,EAAE;YACrG,MAAM,CAAC,qBAAqB,GAAG,UAAU,CAAC,yBAAyB,CAAC,CAAC;SACxE;QAED,MAAM,OAAO,GAAG,UAAU,CAAC,OAAO,CAAC;QACnC,IAAI,OAAO,IAAI,OAAO,YAAY,KAAK,EAAE;YACrC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;gBACpB,IAAI,KAAK,CAAC,IAAI,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;oBAC9C,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;iBAC/B;YACL,CAAC,CAAC,CAAC;SACN;QAED,MAAM,WAAW,GAAG,UAAU,CAAC,cAAc,CAAC,CAAC;QAC/C,IAAI,WAAW,IAAI,WAAW,YAAY,KAAK,EAAE;YAC7C,WAAW,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;gBACvB,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;oBAC1B,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;iBACjC;YACL,CAAC,CAAC,CAAC;SACN;QAED,MAAM,KAAK,GAAG,UAAU,CAAC,KAAK,CAAC;QAC/B,IAAI,KAAK,IAAI,KAAK,YAAY,KAAK,EAAE;YACjC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;gBACjB,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;oBAC1B,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;iBAC3B;YACL,CAAC,CAAC,CAAC;SACN;KACJ;IAAC,OAAO,GAAG,EAAE;QACV,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;KACvB;IAED,OAAO,MAAM,CAAC;AAClB,CAAC;AAED,KAAK,UAAU,UAAU,CAAC,MAAc;IACpC,MAAM,YAAY,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;IAC5C,MAAM,EAAE,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC;IAC9B,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,EAAE,YAAY,EAAE,MAAM,CAAC,CAAC;IAC1E,IAAI,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC;IAC5B,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;AAC7B,CAAC;AAEM,KAAK,UAAU,UAAU;IAC5B,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;
IACrD,IAAI,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE;QAC3B,MAAM,YAAY,GAAG,EAAE,CAAC,YAAY,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;QACzD,IAAI,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;QAC7B,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;QACzB,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;KAEnC;SAAM;QACH,MAAM,MAAM,GAAG,UAAU,EAAE,CAAC;QAC5B,IAAI,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAC;QAClC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;QACnC,MAAM,UAAU,CAAC,MAAM,CAAC,CAAC;QACzB,OAAO,MAAM,CAAC;KACjB;AACL,CAAC;AAfD,gCAeC"}

4  lib/external-queries.js  (generated)

@@ -11,8 +11,9 @@ const core = __importStar(require("@actions/core"));
 const exec = __importStar(require("@actions/exec"));
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
+const util = __importStar(require("./util"));
 async function checkoutExternalQueries(config) {
-    const folder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
+    const folder = util.getRequiredEnvParam('RUNNER_WORKSPACE');
     for (const externalQuery of config.externalQueries) {
         core.info('Checking out ' + externalQuery.repository);
         const checkoutLocation = path.join(folder, externalQuery.repository);
@@ -29,3 +30,4 @@ async function checkoutExternalQueries(config) {
     }
 }
 exports.checkoutExternalQueries = checkoutExternalQueries;
+//# sourceMappingURL=external-queries.js.map

1  lib/external-queries.js.map  (Normal file)

@@ -0,0 +1 @@
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,oDAAsC;AACtC,uCAAyB;AACzB,2CAA6B;AAG7B,6CAA+B;AAExB,KAAK,UAAU,uBAAuB,CAAC,MAA0B;IACtE,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,kBAAkB,CAAC,CAAC;IAE5D,KAAK,MAAM,aAAa,IAAI,MAAM,CAAC,eAAe,EAAE;QAClD,IAAI,CAAC,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,UAAU,CAAC,CAAC;QAEtD,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,aAAa,CAAC,UAAU,CAAC,CAAC;QACrE,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;YACpC,MAAM,OAAO,GAAG,qBAAqB,GAAG,aAAa,CAAC,UAAU,GAAG,MAAM,CAAC;YAC1E,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,gBAAgB,CAAC,CAAC,CAAC;YAC7D,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;gBACrB,cAAc,GAAG,gBAAgB;gBACjC,YAAY,GAAG,gBAAgB,GAAG,OAAO;gBACzC,UAAU,EAAE,aAAa,CAAC,GAAG;aAC9B,CAAC,CAAC;SACJ;QAED,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,gBAAgB,EAAE,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC;KAChF;AACH,CAAC;AAnBD,0DAmBC"}

31  lib/external-queries.test.js  (generated, Normal file)

@@ -0,0 +1,31 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+    result["default"] = mod;
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const ava_1 = __importDefault(require("ava"));
+const fs = __importStar(require("fs"));
+const path = __importStar(require("path"));
+const configUtils = __importStar(require("./config-utils"));
+const externalQueries = __importStar(require("./external-queries"));
+const util = __importStar(require("./util"));
+ava_1.default("checkoutExternalQueries", async (t) => {
+    let config = new configUtils.Config();
+    config.externalQueries = [
+        new configUtils.ExternalQuery("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b"),
+    ];
+    await util.withTmpDir(async (tmpDir) => {
+        process.env["RUNNER_WORKSPACE"] = tmpDir;
+        await externalQueries.checkoutExternalQueries(config);
+        // COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in master
+        t.true(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT")));
+    });
+});
+//# sourceMappingURL=external-queries.test.js.map

1  lib/external-queries.test.js.map  (Normal file)

@@ -0,0 +1 @@
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,4DAA8C;AAC9C,oEAAsD;AACtD,6CAA+B;AAE/B,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IACtC,IAAI,MAAM,GAAG,IAAI,WAAW,CAAC,MAAM,EAAE,CAAC;IACtC,MAAM,CAAC,eAAe,GAAG;QACrB,IAAI,WAAW,CAAC,aAAa,CAAC,kBAAkB,EAAE,0CAA0C,CAAC;KAChG,CAAC;IAEF,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAC,MAAM,EAAC,EAAE;QACjC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAC;QACzC,MAAM,eAAe,CAAC,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtD,uFAAuF;QACvF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;IACjF,CAAC,CAAC,CAAC;AACP,CAAC,CAAC,CAAC"}

1  lib/finalize-db.js  (generated)

@@ -142,3 +142,4 @@ run().catch(e => {
     core.setFailed("analyze action failed: " + e);
     console.log(e);
 });
+//# sourceMappingURL=finalize-db.js.map

1  lib/finalize-db.js.map  (Normal file)
File diff suppressed because one or more lines are too long

1  lib/fingerprints.js  (generated)

@@ -245,3 +245,4 @@ function addFingerprints(sarifContents) {
     return JSON.stringify(sarif);
 }
 exports.addFingerprints = addFingerprints;
+//# sourceMappingURL=fingerprints.js.map

1  lib/fingerprints.js.map  (Normal file)
File diff suppressed because one or more lines are too long

157  lib/fingerprints.test.js  (generated, Normal file)

@@ -0,0 +1,157 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+    result["default"] = mod;
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const ava_1 = __importDefault(require("ava"));
+const fs = __importStar(require("fs"));
+const path = __importStar(require("path"));
+const fingerprints = __importStar(require("./fingerprints"));
+function testHash(t, input, expectedHashes) {
+    let index = 0;
+    let callback = function (lineNumber, hash) {
+        t.is(lineNumber, index + 1);
+        t.is(hash, expectedHashes[index]);
+        index++;
+    };
+    fingerprints.hash(callback, input);
+    t.is(index, input.split(/\r\n|\r|\n/).length);
+}
+ava_1.default('hash', (t) => {
+    // Try empty file
+    testHash(t, "", ["c129715d7a2bc9a3:1"]);
+    // Try various combinations of newline characters
+    testHash(t, " a\nb\n \t\tc\n d", [
+        "271789c17abda88f:1",
+        "54703d4cd895b18:1",
+        "180aee12dab6264:1",
+        "a23a3dc5e078b07b:1"
+    ]);
+    testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End", [
+        "8b7cf3e952e7aeb2:1",
+        "b1ae1287ec4718d9:1",
+        "bff680108adb0fcc:1",
+        "c6805c5e1288b612:1",
+        "b86d3392aea1be30:1",
+        "e6ceba753e1a442:1",
+    ]);
+    testHash(t, " hello; \t\nworld!!!\n\n\n \t\tGreetings\n End\n", [
+        "e9496ae3ebfced30:1",
+        "fb7c023a8b9ccb3f:1",
+        "ce8ba1a563dcdaca:1",
+        "e20e36e16fcb0cc8:1",
+        "b3edc88f2938467e:1",
+        "c8e28b0b4002a3a0:1",
+        "c129715d7a2bc9a3:1",
+    ]);
+    testHash(t, " hello; \t\nworld!!!\r\r\r \t\tGreetings\r End\r", [
+        "e9496ae3ebfced30:1",
+        "fb7c023a8b9ccb3f:1",
+        "ce8ba1a563dcdaca:1",
+        "e20e36e16fcb0cc8:1",
+        "b3edc88f2938467e:1",
+        "c8e28b0b4002a3a0:1",
+        "c129715d7a2bc9a3:1",
+    ]);
+    testHash(t, " hello; \t\r\nworld!!!\r\n\r\n\r\n \t\tGreetings\r\n End\r\n", [
+        "e9496ae3ebfced30:1",
+        "fb7c023a8b9ccb3f:1",
+        "ce8ba1a563dcdaca:1",
+        "e20e36e16fcb0cc8:1",
+        "b3edc88f2938467e:1",
+        "c8e28b0b4002a3a0:1",
+        "c129715d7a2bc9a3:1",
+    ]);
+    testHash(t, " hello; \t\nworld!!!\r\n\n\r \t\tGreetings\r End\r\n", [
+        "e9496ae3ebfced30:1",
+        "fb7c023a8b9ccb3f:1",
+        "ce8ba1a563dcdaca:1",
+        "e20e36e16fcb0cc8:1",
+        "b3edc88f2938467e:1",
+        "c8e28b0b4002a3a0:1",
+        "c129715d7a2bc9a3:1",
+    ]);
+    // Try repeating line that will generate identical hashes
+    testHash(t, "Lorem ipsum dolor sit amet.\n".repeat(10), [
+        "a7f2ff13bc495cf2:1",
+        "a7f2ff13bc495cf2:2",
+        "a7f2ff13bc495cf2:3",
+        "a7f2ff13bc495cf2:4",
+        "a7f2ff13bc495cf2:5",
+        "a7f2ff13bc495cf2:6",
+        "a7f2ff1481e87703:1",
+        "a9cf91f7bbf1862b:1",
+        "55ec222b86bcae53:1",
+        "cc97dc7b1d7d8f7b:1",
+        "c129715d7a2bc9a3:1"
+    ]);
+});
+function testResolveUriToFile(uri, index, artifactsURIs) {
+    const location = { "uri": uri, "index": index };
+    const artifacts = artifactsURIs.map(uri => ({ "location": { "uri": uri } }));
+    return fingerprints.resolveUriToFile(location, artifacts);
+}
+ava_1.default('resolveUriToFile', t => {
+    // The resolveUriToFile method checks that the file exists and is in the right directory
+    // so we need to give it real files to look at. We will use this file as an example.
+    // For this to work we require the current working directory to be a parent, but this
+    // should generally always be the case so this is fine.
+    const cwd = process.cwd();
+    const filepath = __filename;
+    t.true(filepath.startsWith(cwd + '/'));
+    const relativeFilepaht = filepath.substring(cwd.length + 1);
+    process.env['GITHUB_WORKSPACE'] = cwd;
+    // Absolute paths are unmodified
+    t.is(testResolveUriToFile(filepath, undefined, []), filepath);
+    t.is(testResolveUriToFile('file://' + filepath, undefined, []), filepath);
+    // Relative paths are made absolute
+    t.is(testResolveUriToFile(relativeFilepaht, undefined, []), filepath);
+    t.is(testResolveUriToFile('file://' + relativeFilepaht, undefined, []), filepath);
+    // Absolute paths outside the src root are discarded
+    t.is(testResolveUriToFile('/src/foo/bar.js', undefined, []), undefined);
+    t.is(testResolveUriToFile('file:///src/foo/bar.js', undefined, []), undefined);
+    // Other schemes are discarded
+    t.is(testResolveUriToFile('https://' + filepath, undefined, []), undefined);
+    t.is(testResolveUriToFile('ftp://' + filepath, undefined, []), undefined);
+    // Invalid URIs are discarded
+    t.is(testResolveUriToFile(1, undefined, []), undefined);
+    t.is(testResolveUriToFile(undefined, undefined, []), undefined);
+    // Non-existant files are discarded
+    t.is(testResolveUriToFile(filepath + '2', undefined, []), undefined);
+    // Index is resolved
+    t.is(testResolveUriToFile(undefined, 0, [filepath]), filepath);
+    t.is(testResolveUriToFile(undefined, 1, ['foo', filepath]), filepath);
+    // Invalid indexes are discarded
+    t.is(testResolveUriToFile(undefined, 1, [filepath]), undefined);
+    t.is(testResolveUriToFile(undefined, '0', [filepath]), undefined);
+});
+ava_1.default('addFingerprints', t => {
+    // Run an end-to-end test on a test file
+    let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.input.sarif').toString();
+    let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting.expected.sarif').toString();
+    // The test files are stored prettified, but addFingerprints outputs condensed JSON
+    input = JSON.stringify(JSON.parse(input));
+    expected = JSON.stringify(JSON.parse(expected));
+    // The URIs in the SARIF files resolve to files in the testdata directory
+    process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
+    t.deepEqual(fingerprints.addFingerprints(input), expected);
+});
+ava_1.default('missingRegions', t => {
+    // Run an end-to-end test on a test file
+    let input = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.input.sarif').toString();
+    let expected = fs.readFileSync(__dirname + '/../src/testdata/fingerprinting2.expected.sarif').toString();
+    // The test files are stored prettified, but addFingerprints outputs condensed JSON
+    input = JSON.stringify(JSON.parse(input));
+    expected = JSON.stringify(JSON.parse(expected));
+    // The URIs in the SARIF files resolve to files in the testdata directory
+    process.env['GITHUB_WORKSPACE'] = path.normalize(__dirname + '/../src/testdata');
+    t.deepEqual(fingerprints.addFingerprints(input), expected);
+});
+//# sourceMappingURL=fingerprints.test.js.map

1  lib/fingerprints.test.js.map  (Normal file)
File diff suppressed because one or more lines are too long

23  lib/setup-tools.js  (generated)

@@ -37,15 +37,22 @@ exports.CodeQLSetup = CodeQLSetup;
 async function setupCodeQL() {
     const version = '1.0.0';
     const codeqlURL = core.getInput('tools', { required: true });
-    let codeqlFolder = toolcache.find('CodeQL', version);
-    if (codeqlFolder) {
-        core.debug(`CodeQL found in cache ${codeqlFolder}`);
+    try {
+        let codeqlFolder = toolcache.find('CodeQL', version);
+        if (codeqlFolder) {
+            core.debug(`CodeQL found in cache ${codeqlFolder}`);
+        }
+        else {
+            const codeqlPath = await toolcache.downloadTool(codeqlURL);
+            const codeqlExtracted = await toolcache.extractTar(codeqlPath);
+            codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', version);
+        }
+        return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
     }
-    else {
-        const codeqlPath = await toolcache.downloadTool(codeqlURL);
-        const codeqlExtracted = await toolcache.extractTar(codeqlPath);
-        codeqlFolder = await toolcache.cacheDir(codeqlExtracted, 'CodeQL', version);
+    catch (e) {
+        core.error(e);
+        throw new Error("Unable to download and extract CodeQL CLI");
     }
-    return new CodeQLSetup(path.join(codeqlFolder, 'codeql'));
 }
 exports.setupCodeQL = setupCodeQL;
+//# sourceMappingURL=setup-tools.js.map
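
The `tools` input read here via `core.getInput('tools', { required: true })` tells the init action where to download the CodeQL CLI bundle from, so callers supply it explicitly. A hedged sketch of the corresponding workflow step; the URL is a placeholder, not taken from this diff:

```yaml
# Sketch only: the init step supplying the required `tools` input.
# The bundle URL below is a placeholder; use whatever your setup provides.
- uses: ./init
  with:
    tools: https://example.com/path/to/codeql-bundle.tar.gz
```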

1  lib/setup-tools.js.map  (Normal file)

@@ -0,0 +1 @@
{"version":3,"file":"setup-tools.js","sourceRoot":"","sources":["../src/setup-tools.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,+DAAiD;AACjD,2CAA6B;AAE7B,MAAa,WAAW;IAMpB,YAAY,UAAkB;QAC1B,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;QACvB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;QAC3C,4BAA4B;QAC5B,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAC9B,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;YACxB,IAAI,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBAC7B,IAAI,CAAC,GAAG,IAAI,MAAM,CAAC;aACtB;SACJ;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YACrC,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;SAC7B;aAAM,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;YACtC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;SAC3B;aAAM;YACH,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;SAC/D;IACL,CAAC;CACJ;AAxBD,kCAwBC;AAEM,KAAK,UAAU,WAAW;IAC7B,MAAM,OAAO,GAAG,OAAO,CAAC;IACxB,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;IAE7D,IAAI;QACA,IAAI,YAAY,GAAG,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QACrD,IAAI,YAAY,EAAE;YACd,IAAI,CAAC,KAAK,CAAC,yBAAyB,YAAY,EAAE,CAAC,CAAC;SACvD;aAAM;YACH,MAAM,UAAU,GAAG,MAAM,SAAS,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,eAAe,GAAG,MAAM,SAAS,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;YAC/D,YAAY,GAAG,MAAM,SAAS,CAAC,QAAQ,CAAC,eAAe,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC;SAC/E;QACD,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC,CAAC;KAE7D;IAAC,OAAO,CAAC,EAAE;QACR,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACd,MAAM,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC;KAChE;AACL,CAAC;AAnBD,kCAmBC"}

1  lib/setup-tracer.js  (generated)

@@ -212,3 +212,4 @@ run().catch(e => {
     core.setFailed("init action failed: " + e);
     console.log(e);
 });
+//# sourceMappingURL=setup-tracer.js.map

1  lib/setup-tracer.js.map  (Normal file)
File diff suppressed because one or more lines are too long

2  lib/shared-environment.js  (generated)

@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.CODEQL_ACTION_CMD = 'CODEQL_ACTION_CMD';
 exports.CODEQL_ACTION_DATABASE_DIR = 'CODEQL_ACTION_DATABASE_DIR';
 exports.CODEQL_ACTION_LANGUAGES = 'CODEQL_ACTION_LANGUAGES';
+exports.CODEQL_ACTION_ANALYSIS_KEY = 'CODEQL_ACTION_ANALYSIS_KEY';
 exports.ODASA_TRACER_CONFIGURATION = 'ODASA_TRACER_CONFIGURATION';
 exports.CODEQL_ACTION_SCANNED_LANGUAGES = 'CODEQL_ACTION_SCANNED_LANGUAGES';
 exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
@@ -14,3 +15,4 @@ exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
 exports.CODEQL_ACTION_STARTED_AT = 'CODEQL_ACTION_STARTED_AT';
 // Populated when the init action completes successfully
 exports.CODEQL_ACTION_INIT_COMPLETED = 'CODEQL_ACTION_INIT_COMPLETED';
+//# sourceMappingURL=shared-environment.js.map

1  lib/shared-environment.js.map  (Normal file)

@@ -0,0 +1 @@
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,iBAAiB,GAAG,mBAAmB,CAAC;AACxC,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,uBAAuB,GAAG,yBAAyB,CAAC;AACpD,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,+BAA+B,GAAG,iCAAiC,CAAC;AACpE,QAAA,8BAA8B,GAAG,gCAAgC,CAAC;AAC/E,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,wBAAwB,GAAG,0BAA0B,CAAC;AACnE,wDAAwD;AAC3C,QAAA,4BAA4B,GAAG,8BAA8B,CAAC"}

1  lib/tracer-env.js  (generated)

@@ -18,3 +18,4 @@ for (let entry of Object.entries(process.env)) {
 }
 process.stdout.write(process.argv[2]);
 fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');
+//# sourceMappingURL=tracer-env.js.map

1  lib/tracer-env.js.map  (Normal file)

@@ -0,0 +1 @@
{"version":3,"file":"tracer-env.js","sourceRoot":"","sources":["../src/tracer-env.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AAEzB,MAAM,GAAG,GAAG,EAAE,CAAC;AACf,KAAK,IAAI,KAAK,IAAI,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;IAC3C,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACrB,MAAM,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACvB,IAAI,OAAO,KAAK,KAAK,WAAW,IAAI,GAAG,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,kBAAkB,CAAC,EAAE;QACpF,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;KACpB;CACJ;AACD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;AACtC,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,CAAC"}

10  lib/upload-lib.js  (generated)

@@ -58,6 +58,11 @@ exports.combineSarifFiles = combineSarifFiles;
 // If the request fails then this will retry a small number of times.
 async function uploadPayload(payload) {
     core.info('Uploading results');
+    // If in test mode we don't want to upload the results
+    const testMode = process.env['TEST_MODE'] === 'true' || false;
+    if (testMode) {
+        return true;
+    }
     const githubToken = core.getInput('token');
     const ph = new auth.BearerCredentialHandler(githubToken);
     const client = new http.HttpClient('Code Scanning : Upload SARIF', [ph]);
@@ -136,7 +141,8 @@ async function uploadFiles(sarifFiles) {
     }
     const commitOid = util.getRequiredEnvParam('GITHUB_SHA');
     const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
-    const ref = util.getRequiredEnvParam('GITHUB_REF'); // it's in the form "refs/heads/master"
+    const ref = util.getRef();
+    const analysisKey = await util.getAnalysisKey();
     const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
     const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
     core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
@@ -158,6 +164,7 @@ async function uploadFiles(sarifFiles) {
     const payload = JSON.stringify({
         "commit_oid": commitOid,
         "ref": ref,
+        "analysis_key": analysisKey,
         "analysis_name": analysisName,
         "sarif": zipped_sarif,
         "workflow_run_id": workflowRunID,
@@ -177,3 +184,4 @@ async function uploadFiles(sarifFiles) {
     core.endGroup();
     return succeeded;
 }
+//# sourceMappingURL=upload-lib.js.map
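
The `TEST_MODE` short-circuit added to `uploadPayload` is what the integration-testing jobs rely on: the analysis still runs end to end, but nothing is sent to the code scanning endpoint. From a workflow it is enabled with a plain environment variable on the analyze (or upload-sarif) step, exactly as in the jobs above:

```yaml
# Taken from the integration-testing workflow: run analyze without uploading results.
- uses: ./../action/analyze
  env:
    TEST_MODE: true
```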

1  lib/upload-lib.js.map  (Normal file)
File diff suppressed because one or more lines are too long

1  lib/upload-sarif.js  (generated)

@@ -32,3 +32,4 @@ run().catch(e => {
     core.setFailed("codeql/upload-sarif action failed: " + e);
     console.log(e);
 });
+//# sourceMappingURL=upload-sarif.js.map

1  lib/upload-sarif.js.map  (Normal file)

@@ -0,0 +1 @@
{"version":3,"file":"upload-sarif.js","sourceRoot":"","sources":["../src/upload-sarif.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,yDAA2C;AAC3C,6CAA+B;AAE/B,KAAK,UAAU,GAAG;IACd,IAAI,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,KAAK,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,cAAc,CAAC,EAAE;QAC9F,OAAO;KACV;IAED,IAAI;QACA,IAAI,MAAM,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE;YACtD,MAAM,IAAI,CAAC,qBAAqB,CAAC,cAAc,CAAC,CAAC;SACpD;aAAM;YACH,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;SAC3D;KACJ;IAAC,OAAO,KAAK,EAAE;QACZ,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QAC1E,OAAO;KACV;AACL,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACZ,IAAI,CAAC,SAAS,CAAC,qCAAqC,GAAG,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACnB,CAAC,CAAC,CAAC"}
62
lib/util.js
generated
62
lib/util.js
generated
@@ -15,6 +15,8 @@ const http = __importStar(require("@actions/http-client"));
|
|||||||
const auth = __importStar(require("@actions/http-client/auth"));
|
const auth = __importStar(require("@actions/http-client/auth"));
|
||||||
const octokit = __importStar(require("@octokit/rest"));
|
const octokit = __importStar(require("@octokit/rest"));
|
||||||
const console_log_level_1 = __importDefault(require("console-log-level"));
|
const console_log_level_1 = __importDefault(require("console-log-level"));
|
||||||
|
const fs = __importStar(require("fs"));
|
||||||
|
const os = __importStar(require("os"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const sharedEnv = __importStar(require("./shared-environment"));
|
const sharedEnv = __importStar(require("./shared-environment"));
|
||||||
/**
|
/**
|
||||||
@@ -149,6 +151,55 @@ async function getLanguages() {
|
|||||||
return languages;
|
return languages;
|
||||||
}
|
}
|
||||||
exports.getLanguages = getLanguages;
|
exports.getLanguages = getLanguages;
|
||||||
|
/**
|
||||||
|
* Get the path of the currently executing workflow.
|
||||||
|
*/
|
||||||
|
async function getWorkflowPath() {
|
||||||
|
const repo_nwo = getRequiredEnvParam('GITHUB_REPOSITORY').split("/");
|
||||||
|
const owner = repo_nwo[0];
|
||||||
|
const repo = repo_nwo[1];
|
||||||
|
const run_id = getRequiredEnvParam('GITHUB_RUN_ID');
|
||||||
|
const ok = new octokit.Octokit({
|
||||||
|
auth: core.getInput('token'),
|
||||||
|
userAgent: "CodeQL Action",
|
||||||
|
log: console_log_level_1.default({ level: 'debug' })
|
||||||
|
});
|
||||||
|
const runsResponse = await ok.request('GET /repos/:owner/:repo/actions/runs/:run_id', {
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
run_id
|
||||||
|
});
|
||||||
|
const workflowUrl = runsResponse.data.workflow_url;
|
||||||
|
const workflowResponse = await ok.request('GET ' + workflowUrl);
|
||||||
|
return workflowResponse.data.path;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Get the analysis key paramter for the current job.
|
||||||
|
*
|
||||||
|
* This will combine the workflow path and current job name.
|
||||||
|
* Computing this the first time requires making requests to
|
||||||
|
* the github API, but after that the result will be cached.
|
||||||
|
*/
|
||||||
|
async function getAnalysisKey() {
|
||||||
|
let analysisKey = process.env[sharedEnv.CODEQL_ACTION_ANALYSIS_KEY];
|
||||||
|
if (analysisKey !== undefined) {
|
||||||
|
return analysisKey;
|
||||||
|
}
|
||||||
|
const workflowPath = await getWorkflowPath();
|
||||||
|
const jobName = getRequiredEnvParam('GITHUB_JOB');
|
||||||
|
analysisKey = workflowPath + ':' + jobName;
|
||||||
|
core.exportVariable(sharedEnv.CODEQL_ACTION_ANALYSIS_KEY, analysisKey);
|
||||||
|
return analysisKey;
|
||||||
|
}
|
||||||
|
exports.getAnalysisKey = getAnalysisKey;
|
||||||
|
/**
|
||||||
|
* Get the ref currently being analyzed.
|
||||||
|
*/
|
||||||
|
function getRef() {
|
||||||
|
// it's in the form "refs/heads/master"
|
||||||
|
return getRequiredEnvParam('GITHUB_REF');
|
||||||
|
}
|
||||||
|
exports.getRef = getRef;
|
 /**
  * Compose a StatusReport.
  *
@@ -159,6 +210,7 @@ exports.getLanguages = getLanguages;
  */
 async function createStatusReport(actionName, status, cause, exception) {
     const commitOid = process.env['GITHUB_SHA'] || '';
+    const ref = getRef();
     const workflowRunIDStr = process.env['GITHUB_RUN_ID'];
     let workflowRunID = -1;
     if (workflowRunIDStr) {
@@ -175,6 +227,7 @@ async function createStatusReport(actionName, status, cause, exception) {
         job_name: jobName,
         languages: languages,
         commit_oid: commitOid,
+        ref: ref,
         action_name: actionName,
         action_oid: "unknown",
         started_at: startedAt,
@@ -280,3 +333,12 @@ function getToolNames(sarifContents) {
     return Object.keys(toolNames);
 }
 exports.getToolNames = getToolNames;
+// Creates a random temporary directory, runs the given body, and then deletes the directory.
+// Mostly intended for use within tests.
+async function withTmpDir(body) {
+    const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
+    await body(tmpDir);
+    fs.rmdirSync(tmpDir, { recursive: true });
+}
+exports.withTmpDir = withTmpDir;
+//# sourceMappingURL=util.js.map
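Not part of the diff: a minimal usage sketch for the `withTmpDir` helper added above, assuming it is required from the compiled `lib/util.js`. The callback receives the temporary directory path, and the directory is removed recursively once the callback's promise resolves.

```js
// Illustration only — assumes lib/util.js as added in this diff.
const util = require('./lib/util');

async function example() {
  await util.withTmpDir(async tmpDir => {
    // Work inside tmpDir; it is deleted (recursively) afterwards.
    console.log('using temporary directory', tmpDir);
  });
}

example();
```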
1 lib/util.js.map Normal file
File diff suppressed because one or more lines are too long
21 lib/util.test.js generated Normal file
@@ -0,0 +1,21 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+    result["default"] = mod;
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const ava_1 = __importDefault(require("ava"));
+const fs = __importStar(require("fs"));
+const util = __importStar(require("./util"));
+ava_1.default('getToolNames', t => {
+    const input = fs.readFileSync(__dirname + '/../src/testdata/tool-names.sarif', 'utf8');
+    const toolNames = util.getToolNames(input);
+    t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
+});
+//# sourceMappingURL=util.test.js.map
1 lib/util.test.js.map Normal file
@@ -0,0 +1 @@
+{"version":3,"file":"util.test.js","sourceRoot":"","sources":["../src/util.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AACvB,uCAAyB;AAEzB,6CAA+B;AAE/B,aAAI,CAAC,cAAc,EAAE,CAAC,CAAC,EAAE;IACvB,MAAM,KAAK,GAAG,EAAE,CAAC,YAAY,CAAC,SAAS,GAAG,mCAAmC,EAAE,MAAM,CAAC,CAAC;IACvF,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;IAC3C,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,+BAA+B,EAAE,QAAQ,CAAC,CAAC,CAAC;AACtE,CAAC,CAAC,CAAC"}
1 node_modules/.bin/atob generated vendored
@@ -1 +0,0 @@
-../atob/bin/atob.js
1 node_modules/.bin/ava generated vendored Symbolic link
@@ -0,0 +1 @@
+../ava/cli.js
1 node_modules/.bin/escodegen generated vendored
@@ -1 +0,0 @@
-../escodegen/bin/escodegen.js
1 node_modules/.bin/esgenerate generated vendored
@@ -1 +0,0 @@
-../escodegen/bin/esgenerate.js
1 node_modules/.bin/esparse generated vendored
@@ -1 +0,0 @@
-../esprima/bin/esparse.js
1 node_modules/.bin/esvalidate generated vendored
@@ -1 +0,0 @@
-../esprima/bin/esvalidate.js
1 node_modules/.bin/jest generated vendored
@@ -1 +0,0 @@
-../jest/bin/jest.js
1 node_modules/.bin/jest-runtime generated vendored
@@ -1 +0,0 @@
-../jest-runtime/bin/jest-runtime.js
1 node_modules/.bin/jsesc generated vendored
@@ -1 +0,0 @@
-../jsesc/bin/jsesc
1 node_modules/.bin/json5 generated vendored
@@ -1 +0,0 @@
-../json5/lib/cli.js
1 node_modules/.bin/parser generated vendored
@@ -1 +0,0 @@
-../@babel/parser/bin/babel-parser.js
1 node_modules/.bin/rc generated vendored Symbolic link
@@ -0,0 +1 @@
+../rc/cli.js
1 node_modules/.bin/removeNPMAbsolutePaths generated vendored Symbolic link
@@ -0,0 +1 @@
+../removeNPMAbsolutePaths/bin/removeNPMAbsolutePaths
1 node_modules/.bin/sane generated vendored
@@ -1 +0,0 @@
-../sane/src/cli.js
1 node_modules/.bin/sshpk-conv generated vendored
@@ -1 +0,0 @@
-../sshpk/bin/sshpk-conv
1 node_modules/.bin/sshpk-sign generated vendored
@@ -1 +0,0 @@
-../sshpk/bin/sshpk-sign
1 node_modules/.bin/sshpk-verify generated vendored
@@ -1 +0,0 @@
-../sshpk/bin/sshpk-verify
1 node_modules/.bin/ts-jest generated vendored
@@ -1 +0,0 @@
-../ts-jest/cli.js
1 node_modules/.bin/watch generated vendored
@@ -1 +0,0 @@
-../@cnakazawa/watch/cli.js
8 node_modules/@actions/http-client/README.md generated vendored
@@ -18,6 +18,8 @@ A lightweight HTTP client optimized for use with actions, TypeScript with generi
 - Basic, Bearer and PAT Support out of the box. Extensible handlers for others.
 - Redirects supported
 
+Features and releases [here](./RELEASES.md)
+
 ## Install
 
 ```
@@ -49,7 +51,11 @@ export NODE_DEBUG=http
 
 ## Node support
 
-The http-client is built using the latest LTS version of Node 12. We also support the latest LTS for Node 6, 8 and Node 10.
+The http-client is built using the latest LTS version of Node 12. It may work on previous node LTS versions but it's tested and officially supported on Node12+.
+
+## Support and Versioning
+
+We follow semver and will hold compatibility between major versions and increment the minor version with new features and capabilities (while holding compat).
 
 ## Contributing
 
16 node_modules/@actions/http-client/RELEASES.md generated vendored Normal file
@@ -0,0 +1,16 @@
+## Releases
+
+## 1.0.7
+Update NPM dependencies and add 429 to the list of HttpCodes
+
+## 1.0.6
+Automatically sends Content-Type and Accept application/json headers for \<verb>Json() helper methods if not set in the client or parameters.
+
+## 1.0.5
+Adds \<verb>Json() helper methods for json over http scenarios.
+
+## 1.0.4
+Started to add \<verb>Json() helper methods. Do not use this release for that. Use >= 1.0.5 since there was an issue with types.
+
+## 1.0.1 to 1.0.3
+Adds proxy support.
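Not part of the diff: a minimal sketch of the `<verb>Json()` helpers described in the release notes above, assuming `@actions/http-client` >= 1.0.5 is installed. `getJson()` fills in the `accept: application/json` header when it is not supplied and resolves to an object of the shape `{ statusCode, result, headers }`.

```js
// Illustration only — not part of the vendored diff.
const httpm = require('@actions/http-client');

async function example() {
  const client = new httpm.HttpClient('codeql-action-example');
  // Accept: application/json is added automatically if not already set.
  const response = await client.getJson('https://api.github.com/repos/github/codeql-action');
  console.log(response.statusCode, response.result && response.result.full_name);
}

example();
```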
7 node_modules/@actions/http-client/auth.js generated vendored
@@ -6,7 +6,9 @@ class BasicCredentialHandler {
         this.password = password;
     }
     prepareRequest(options) {
-        options.headers['Authorization'] = 'Basic ' + Buffer.from(this.username + ':' + this.password).toString('base64');
+        options.headers['Authorization'] =
+            'Basic ' +
+            Buffer.from(this.username + ':' + this.password).toString('base64');
     }
     // This handler cannot handle 401
     canHandleAuthentication(response) {
@@ -42,7 +44,8 @@ class PersonalAccessTokenCredentialHandler {
     // currently implements pre-authorization
     // TODO: support preAuth = false where it hooks on 401
     prepareRequest(options) {
-        options.headers['Authorization'] = 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
+        options.headers['Authorization'] =
+            'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
     }
     // This handler cannot handle 401
     canHandleAuthentication(response) {
24 node_modules/@actions/http-client/index.d.ts generated vendored
@@ -1,5 +1,5 @@
 /// <reference types="node" />
-import http = require("http");
+import http = require('http');
 import ifm = require('./interfaces');
 export declare enum HttpCodes {
     OK = 200,
@@ -23,12 +23,20 @@ export declare enum HttpCodes {
     RequestTimeout = 408,
     Conflict = 409,
     Gone = 410,
+    TooManyRequests = 429,
     InternalServerError = 500,
     NotImplemented = 501,
     BadGateway = 502,
     ServiceUnavailable = 503,
     GatewayTimeout = 504
 }
+export declare enum Headers {
+    Accept = "accept",
+    ContentType = "content-type"
+}
+export declare enum MediaTypes {
+    ApplicationJson = "application/json"
+}
 /**
  * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
  * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
@@ -39,11 +47,6 @@ export declare class HttpClientResponse implements ifm.IHttpClientResponse {
     message: http.IncomingMessage;
     readBody(): Promise<string>;
 }
-export interface ITypedResponse<T> {
-    statusCode: number;
-    result: T | null;
-    headers: Object;
-}
 export declare function isHttps(requestUrl: string): boolean;
 export declare class HttpClient {
     userAgent: string | undefined;
@@ -73,10 +76,10 @@ export declare class HttpClient {
      * Gets a typed object from an endpoint
      * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
      */
-    getJson<T>(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
-    postJson<T>(requestUrl: string, obj: T, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
-    putJson<T>(requestUrl: string, obj: T, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
-    patchJson<T>(requestUrl: string, obj: T, additionalHeaders?: ifm.IHeaders): Promise<ITypedResponse<T>>;
+    getJson<T>(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
+    postJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
+    putJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
+    patchJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
     /**
      * Makes a raw http request.
      * All other methods such as get, post, patch, and request ultimately call this.
@@ -108,6 +111,7 @@ export declare class HttpClient {
     getAgent(serverUrl: string): http.Agent;
     private _prepareRequest;
     private _mergeHeaders;
+    private _getExistingOrDefaultHeader;
     private _getAgent;
     private _performExponentialBackoff;
     private static dateTimeDeserializer;
111 node_modules/@actions/http-client/index.js generated vendored
@@ -28,12 +38,22 @@ var HttpCodes;
     HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
     HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
     HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
+    HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
     HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
     HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
     HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
     HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
     HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
 })(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
+var Headers;
+(function (Headers) {
+    Headers["Accept"] = "accept";
+    Headers["ContentType"] = "content-type";
+})(Headers = exports.Headers || (exports.Headers = {}));
+var MediaTypes;
+(function (MediaTypes) {
+    MediaTypes["ApplicationJson"] = "application/json";
+})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
 /**
  * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
  * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
@@ -43,8 +53,18 @@ function getProxyUrl(serverUrl) {
     return proxyUrl ? proxyUrl.href : '';
 }
 exports.getProxyUrl = getProxyUrl;
-const HttpRedirectCodes = [HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect];
-const HttpResponseRetryCodes = [HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout];
+const HttpRedirectCodes = [
+    HttpCodes.MovedPermanently,
+    HttpCodes.ResourceMoved,
+    HttpCodes.SeeOther,
+    HttpCodes.TemporaryRedirect,
+    HttpCodes.PermanentRedirect
+];
+const HttpResponseRetryCodes = [
+    HttpCodes.BadGateway,
+    HttpCodes.ServiceUnavailable,
+    HttpCodes.GatewayTimeout
+];
 const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
 const ExponentialBackoffCeiling = 10;
 const ExponentialBackoffTimeSlice = 5;
@@ -136,22 +156,29 @@ class HttpClient {
      * Gets a typed object from an endpoint
      * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
      */
-    async getJson(requestUrl, additionalHeaders) {
+    async getJson(requestUrl, additionalHeaders = {}) {
+        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
         let res = await this.get(requestUrl, additionalHeaders);
         return this._processResponse(res, this.requestOptions);
     }
-    async postJson(requestUrl, obj, additionalHeaders) {
+    async postJson(requestUrl, obj, additionalHeaders = {}) {
         let data = JSON.stringify(obj, null, 2);
+        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.post(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
-    async putJson(requestUrl, obj, additionalHeaders) {
+    async putJson(requestUrl, obj, additionalHeaders = {}) {
         let data = JSON.stringify(obj, null, 2);
+        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.put(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
-    async patchJson(requestUrl, obj, additionalHeaders) {
+    async patchJson(requestUrl, obj, additionalHeaders = {}) {
         let data = JSON.stringify(obj, null, 2);
+        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.patch(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
@@ -162,18 +189,22 @@ class HttpClient {
      */
     async request(verb, requestUrl, data, headers) {
         if (this._disposed) {
-            throw new Error("Client has already been disposed.");
+            throw new Error('Client has already been disposed.');
         }
         let parsedUrl = url.parse(requestUrl);
         let info = this._prepareRequest(verb, parsedUrl, headers);
         // Only perform retries on reads since writes may not be idempotent.
-        let maxTries = (this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1) ? this._maxRetries + 1 : 1;
+        let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
+            ? this._maxRetries + 1
+            : 1;
         let numTries = 0;
         let response;
         while (numTries < maxTries) {
             response = await this.requestRaw(info, data);
             // Check if it's an authentication challenge
-            if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) {
+            if (response &&
+                response.message &&
+                response.message.statusCode === HttpCodes.Unauthorized) {
                 let authenticationHandler;
                 for (let i = 0; i < this.handlers.length; i++) {
                     if (this.handlers[i].canHandleAuthentication(response)) {
@@ -191,21 +222,32 @@ class HttpClient {
                 }
             }
             let redirectsRemaining = this._maxRedirects;
-            while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1
-                && this._allowRedirects
-                && redirectsRemaining > 0) {
-                const redirectUrl = response.message.headers["location"];
+            while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
+                this._allowRedirects &&
+                redirectsRemaining > 0) {
+                const redirectUrl = response.message.headers['location'];
                 if (!redirectUrl) {
                     // if there's no location to redirect to, we won't
                     break;
                 }
                 let parsedRedirectUrl = url.parse(redirectUrl);
-                if (parsedUrl.protocol == 'https:' && parsedUrl.protocol != parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) {
-                    throw new Error("Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.");
+                if (parsedUrl.protocol == 'https:' &&
+                    parsedUrl.protocol != parsedRedirectUrl.protocol &&
+                    !this._allowRedirectDowngrade) {
+                    throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
                 }
                 // we need to finish reading the response before reassigning response
                 // which will leak the open socket.
                 await response.readBody();
+                // strip authorization header if redirected to a different hostname
+                if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
+                    for (let header in headers) {
+                        // header names are case insensitive
+                        if (header.toLowerCase() === 'authorization') {
+                            delete headers[header];
+                        }
+                    }
+                }
                 // let's make the request with the new redirectUrl
                 info = this._prepareRequest(verb, parsedRedirectUrl, headers);
                 response = await this.requestRaw(info, data);
@@ -256,8 +298,8 @@ class HttpClient {
      */
     requestRawWithCallback(info, data, onResult) {
         let socket;
-        if (typeof (data) === 'string') {
-            info.options.headers["Content-Length"] = Buffer.byteLength(data, 'utf8');
+        if (typeof data === 'string') {
+            info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
         }
         let callbackCalled = false;
         let handleResult = (err, res) => {
@@ -270,7 +312,7 @@ class HttpClient {
             let res = new HttpClientResponse(msg);
             handleResult(null, res);
         });
-        req.on('socket', (sock) => {
+        req.on('socket', sock => {
             socket = sock;
         });
         // If we ever get disconnected, we want the socket to timeout eventually
@@ -285,10 +327,10 @@ class HttpClient {
             // res should have headers
             handleResult(err, null);
         });
-        if (data && typeof (data) === 'string') {
+        if (data && typeof data === 'string') {
             req.write(data, 'utf8');
         }
-        if (data && typeof (data) !== 'string') {
+        if (data && typeof data !== 'string') {
             data.on('close', function () {
                 req.end();
             });
@@ -315,29 +357,40 @@ class HttpClient {
         const defaultPort = usingSsl ? 443 : 80;
         info.options = {};
         info.options.host = info.parsedUrl.hostname;
-        info.options.port = info.parsedUrl.port ? parseInt(info.parsedUrl.port) : defaultPort;
-        info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
+        info.options.port = info.parsedUrl.port
+            ? parseInt(info.parsedUrl.port)
+            : defaultPort;
+        info.options.path =
+            (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
         info.options.method = method;
         info.options.headers = this._mergeHeaders(headers);
         if (this.userAgent != null) {
-            info.options.headers["user-agent"] = this.userAgent;
+            info.options.headers['user-agent'] = this.userAgent;
         }
         info.options.agent = this._getAgent(info.parsedUrl);
         // gives handlers an opportunity to participate
         if (this.handlers) {
-            this.handlers.forEach((handler) => {
+            this.handlers.forEach(handler => {
                 handler.prepareRequest(info.options);
             });
         }
         return info;
     }
     _mergeHeaders(headers) {
-        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {});
+        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
         if (this.requestOptions && this.requestOptions.headers) {
             return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
         }
         return lowercaseKeys(headers || {});
     }
+    _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
+        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
+        let clientHeader;
+        if (this.requestOptions && this.requestOptions.headers) {
+            clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
+        }
+        return additionalHeaders[header] || clientHeader || _default;
+    }
     _getAgent(parsedUrl) {
         let agent;
         let proxyUrl = pm.getProxyUrl(parsedUrl);
@@ -369,7 +422,7 @@ class HttpClient {
                 proxyAuth: proxyUrl.auth,
                 host: proxyUrl.hostname,
                 port: proxyUrl.port
-            },
+            }
             };
         let tunnelAgent;
         const overHttps = proxyUrl.protocol === 'https:';
@@ -396,7 +449,9 @@ class HttpClient {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
-            agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false });
+            agent.options = Object.assign(agent.options || {}, {
+                rejectUnauthorized: false
+            });
         }
         return agent;
     }
@@ -457,7 +512,7 @@ class HttpClient {
             msg = contents;
         }
         else {
-            msg = "Failed request: (" + statusCode + ")";
+            msg = 'Failed request: (' + statusCode + ')';
         }
         let err = new Error(msg);
         // attach statusCode and body obj (if available) to the error object
9 node_modules/@actions/http-client/interfaces.d.ts generated vendored
@@ -1,6 +1,6 @@
 /// <reference types="node" />
-import http = require("http");
-import url = require("url");
+import http = require('http');
+import url = require('url');
 export interface IHeaders {
     [key: string]: any;
 }
@@ -43,3 +43,8 @@ export interface IRequestOptions {
     allowRetries?: boolean;
     maxRetries?: number;
 }
+export interface ITypedResponse<T> {
+    statusCode: number;
+    result: T | null;
+    headers: Object;
+}
1 node_modules/@actions/http-client/interfaces.js generated vendored
@@ -1,3 +1,2 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-;
39 node_modules/@actions/http-client/node_modules/tunnel/package.json generated vendored
@@ -1,25 +1,7 @@
 {
-  "author": {
-    "name": "Koichi Kobayashi",
-    "email": "koichik@improvement.jp"
-  },
-  "bugs": {
-    "url": "https://github.com/koichik/node-tunnel/issues"
-  },
-  "bundleDependencies": false,
-  "deprecated": false,
+  "name": "tunnel",
+  "version": "0.0.6",
   "description": "Node HTTP/HTTPS Agents for tunneling proxies",
-  "devDependencies": {
-    "mocha": "^5.2.0",
-    "should": "^13.2.3"
-  },
-  "directories": {
-    "lib": "./lib"
-  },
-  "engines": {
-    "node": ">=0.6.11 <=0.7.0 || >=0.7.3"
-  },
-  "homepage": "https://github.com/koichik/node-tunnel/",
   "keywords": [
     "http",
     "https",
@@ -27,15 +9,26 @@
     "proxy",
     "tunnel"
   ],
+  "homepage": "https://github.com/koichik/node-tunnel/",
+  "bugs": "https://github.com/koichik/node-tunnel/issues",
   "license": "MIT",
+  "author": "Koichi Kobayashi <koichik@improvement.jp>",
   "main": "./index.js",
-  "name": "tunnel",
+  "directories": {
+    "lib": "./lib"
+  },
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/koichik/node-tunnel.git"
+    "url": "https://github.com/koichik/node-tunnel.git"
   },
   "scripts": {
     "test": "mocha"
   },
-  "version": "0.0.6"
+  "devDependencies": {
+    "mocha": "^5.2.0",
+    "should": "^13.2.3"
+  },
+  "engines": {
+    "node": ">=0.6.11 <=0.7.0 || >=0.7.3"
+  }
 }
62 node_modules/@actions/http-client/package.json generated vendored
@@ -1,39 +1,39 @@
 {
-  "author": {
-    "name": "GitHub, Inc."
-  },
-  "bugs": {
-    "url": "https://github.com/actions/http-client/issues"
-  },
-  "bundleDependencies": false,
-  "dependencies": {
-    "tunnel": "0.0.6"
-  },
-  "deprecated": false,
-  "description": "Actions Http Client",
-  "devDependencies": {
-    "@types/jest": "^24.0.25",
-    "@types/node": "^12.12.24",
-    "jest": "^24.9.0",
-    "proxy": "^1.0.1",
-    "ts-jest": "^24.3.0",
-    "typescript": "^3.7.4"
-  },
-  "homepage": "https://github.com/actions/http-client#readme",
-  "keywords": [
-    "Actions",
-    "Http"
-  ],
-  "license": "MIT",
-  "main": "index.js",
   "name": "@actions/http-client",
+  "version": "1.0.8",
+  "description": "Actions Http Client",
+  "main": "index.js",
+  "scripts": {
+    "build": "rm -Rf ./_out && tsc && cp package*.json ./_out && cp *.md ./_out && cp LICENSE ./_out && cp actions.png ./_out",
+    "test": "jest",
+    "format": "prettier --write *.ts && prettier --write **/*.ts",
+    "format-check": "prettier --check *.ts && prettier --check **/*.ts",
+    "audit-check": "npm audit --audit-level=moderate"
+  },
   "repository": {
     "type": "git",
     "url": "git+https://github.com/actions/http-client.git"
   },
-  "scripts": {
-    "build": "rm -Rf ./_out && tsc && cp package*.json ./_out && cp *.md ./_out && cp LICENSE ./_out && cp actions.png ./_out",
-    "test": "jest"
+  "keywords": [
+    "Actions",
+    "Http"
+  ],
+  "author": "GitHub, Inc.",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/actions/http-client/issues"
   },
-  "version": "1.0.4"
+  "homepage": "https://github.com/actions/http-client#readme",
+  "devDependencies": {
+    "@types/jest": "^25.1.4",
+    "@types/node": "^12.12.31",
+    "jest": "^25.1.0",
+    "prettier": "^2.0.4",
+    "proxy": "^1.0.1",
+    "ts-jest": "^25.2.1",
+    "typescript": "^3.8.3"
+  },
+  "dependencies": {
+    "tunnel": "0.0.6"
+  }
 }
node_modules/@actions/http-client/proxy.js
generated
vendored
13
node_modules/@actions/http-client/proxy.js
generated
vendored
@@ -9,12 +9,10 @@ function getProxyUrl(reqUrl) {
|
|||||||
}
|
}
|
||||||
let proxyVar;
|
let proxyVar;
|
||||||
if (usingSsl) {
|
if (usingSsl) {
|
||||||
proxyVar = process.env["https_proxy"] ||
|
proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];
|
||||||
process.env["HTTPS_PROXY"];
|
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
proxyVar = process.env["http_proxy"] ||
|
proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];
|
||||||
process.env["HTTP_PROXY"];
|
|
||||||
}
|
}
|
||||||
if (proxyVar) {
|
if (proxyVar) {
|
||||||
proxyUrl = url.parse(proxyVar);
|
proxyUrl = url.parse(proxyVar);
|
||||||
@@ -26,7 +24,7 @@ function checkBypass(reqUrl) {
|
|||||||
if (!reqUrl.hostname) {
|
if (!reqUrl.hostname) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
let noProxy = process.env["no_proxy"] || process.env["NO_PROXY"] || '';
|
let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
|
||||||
if (!noProxy) {
|
if (!noProxy) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
@@ -47,7 +45,10 @@ function checkBypass(reqUrl) {
|
|||||||
upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
|
upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
|
||||||
}
|
}
|
||||||
// Compare request host against noproxy
|
// Compare request host against noproxy
|
||||||
for (let upperNoProxyItem of noProxy.split(',').map(x => x.trim().toUpperCase()).filter(x => x)) {
|
for (let upperNoProxyItem of noProxy
|
||||||
|
.split(',')
|
||||||
|
.map(x => x.trim().toUpperCase())
|
||||||
|
.filter(x => x)) {
|
||||||
if (upperReqHosts.some(x => x === upperNoProxyItem)) {
|
if (upperReqHosts.some(x => x === upperNoProxyItem)) {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|||||||
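Not part of the diff: a small sketch of how the proxy handling in `proxy.js` above is consulted through the client's exported `getProxyUrl()`, assuming `@actions/http-client` is installed. Hosts listed in `no_proxy` are bypassed and an empty string is returned.

```js
// Illustration only — not part of the vendored diff.
process.env['https_proxy'] = 'http://127.0.0.1:8080';
process.env['no_proxy'] = 'internal.example';

const httpm = require('@actions/http-client');

console.log(httpm.getProxyUrl('https://api.github.com'));   // -> http://127.0.0.1:8080/
console.log(httpm.getProxyUrl('https://internal.example')); // -> '' (bypassed via no_proxy)
```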
63 node_modules/@ava/typescript/README.md generated vendored Normal file
@@ -0,0 +1,63 @@
+# @ava/typescript
+
+Adds rudimentary [TypeScript](https://www.typescriptlang.org/) support to [AVA](https://avajs.dev).
+
+This is designed to work for projects that precompile TypeScript. It allows AVA to load the compiled JavaScript, while configuring AVA to treat the TypeScript files as test files.
+
+In other words, say you have a test file at `src/test.ts`. You've configured TypeScript to output to `build/`. Using `@ava/typescript` you can run the test using `npx ava src/test.ts`.
+
+## Enabling TypeScript support
+
+Add this package to your project:
+
+```console
+npm install --save-dev @ava/typescript
+```
+
+Then, enable TypeScript support either in `package.json` or `ava.config.*`:
+
+**`package.json`:**
+
+```json
+{
+  "ava": {
+    "typescript": {
+      "rewritePaths": {
+        "src/": "build/"
+      }
+    }
+  }
+}
+```
+
+Both keys and values of the `rewritePaths` object must end with a `/`. Paths are relative to your project directory.
+
+Output files are expected to have the `.js` extension.
+
+AVA searches your entire project for `*.js`, `*.cjs`, `*.mjs` and `*.ts` files (or other extensions you've configured). It will ignore such files found in the `rewritePaths` targets (e.g. `build/`). If you use more specific paths, for instance `build/main/`, you may need to change AVA's `files` configuration to ignore other directories.
+
+## Add additional extensions
+
+You can configure AVA to recognize additional file extensions. To add (partial†) JSX support:
+
+**`package.json`:**
+
+```json
+{
+  "ava": {
+    "typescript": {
+      "extensions": [
+        "ts",
+        "tsx"
+      ],
+      "rewritePaths": {
+        "src/": "build/"
+      }
+    }
+  }
+}
+```
+
+See also AVA's [`extensions` option](https://github.com/avajs/ava/blob/master/docs/06-configuration.md#options).
+
+† Note that the [*preserve* mode for JSX](https://www.typescriptlang.org/docs/handbook/jsx.html) is not (yet) supported.
136 node_modules/@ava/typescript/index.js generated vendored Normal file
@@ -0,0 +1,136 @@
+'use strict';
+const path = require('path');
+
+const escapeStringRegexp = require('escape-string-regexp');
+
+const pkg = require('./package.json');
+
+function isPlainObject(x) {
+    return x !== null && typeof x === 'object' && Reflect.getPrototypeOf(x) === Object.prototype;
+}
+
+function isValidExtensions(extensions) {
+    return Array.isArray(extensions) &&
+        extensions.length > 0 &&
+        extensions.every(ext => typeof ext === 'string' && ext !== '') &&
+        new Set(extensions).size === extensions.length;
+}
+
+function isValidRewritePaths(rewritePaths) {
+    if (!isPlainObject(rewritePaths)) {
+        return false;
+    }
+
+    return Object.entries(rewritePaths).every(([from, to]) => {
+        return from.endsWith('/') && typeof to === 'string' && to.endsWith('/');
+    });
+}
+
+module.exports = ({negotiateProtocol}) => {
+    const protocol = negotiateProtocol(['ava-3.2', 'ava-3'], {version: pkg.version});
+    if (protocol === null) {
+        return;
+    }
+
+    return {
+        main({config}) {
+            let valid = false;
+            if (isPlainObject(config)) {
+                const keys = Object.keys(config);
+                if (keys.every(key => key === 'extensions' || key === 'rewritePaths')) {
+                    valid =
+                        (config.extensions === undefined || isValidExtensions(config.extensions)) &&
+                        isValidRewritePaths(config.rewritePaths);
+                }
+            }
+
+            if (!valid) {
+                throw new Error(`Unexpected Typescript configuration for AVA. See https://github.com/avajs/typescript/blob/v${pkg.version}/README.md for allowed values.`);
+            }
+
+            const {
+                extensions = ['ts'],
+                rewritePaths: relativeRewritePaths
+            } = config;
+
+            const rewritePaths = Object.entries(relativeRewritePaths).map(([from, to]) => [
+                path.join(protocol.projectDir, from),
+                path.join(protocol.projectDir, to)
+            ]);
+            const testFileExtension = new RegExp(`\\.(${extensions.map(ext => escapeStringRegexp(ext)).join('|')})$`);
+
+            return {
+                async compile() {
+                    return {
+                        extensions: extensions.slice(),
+                        rewritePaths: rewritePaths.slice()
+                    };
+                },
+
+                get extensions() {
+                    return extensions.slice();
+                },
+
+                ignoreChange(filePath) {
+                    if (!testFileExtension.test(filePath)) {
+                        return false;
+                    }
+
+                    return rewritePaths.some(([from]) => filePath.startsWith(from));
+                },
+
+                resolveTestFile(testfile) {
+                    if (!testFileExtension.test(testfile)) {
+                        return testfile;
+                    }
+
+                    const rewrite = rewritePaths.find(([from]) => testfile.startsWith(from));
+                    if (rewrite === undefined) {
+                        return testfile;
+                    }
+
+                    const [from, to] = rewrite;
+                    // TODO: Support JSX preserve mode — https://www.typescriptlang.org/docs/handbook/jsx.html
+                    return `${to}${testfile.slice(from.length)}`.replace(testFileExtension, '.js');
+                },
+
+                updateGlobs({filePatterns, ignoredByWatcherPatterns}) {
+                    return {
+                        filePatterns: [
+                            ...filePatterns,
+                            '!**/*.d.ts',
+                            ...Object.values(relativeRewritePaths).map(to => `!${to}**`)
+                        ],
+                        ignoredByWatcherPatterns: [
+                            ...ignoredByWatcherPatterns,
+                            ...Object.values(relativeRewritePaths).map(to => `${to}**/*.js.map`)
+                        ]
+                    };
+                }
+            };
+        },
+
+        worker({extensionsToLoadAsModules, state: {extensions, rewritePaths}}) {
+            const testFileExtension = new RegExp(`\\.(${extensions.map(ext => escapeStringRegexp(ext)).join('|')})$`);
+
+            return {
+                canLoad(ref) {
+                    return testFileExtension.test(ref) && rewritePaths.some(([from]) => ref.startsWith(from));
+                },
+
+                async load(ref, {requireFn}) {
+                    for (const extension of extensionsToLoadAsModules) {
+                        if (ref.endsWith(`.${extension}`)) {
+                            throw new Error('@ava/typescript cannot yet load ESM files');
+                        }
+                    }
+
+                    const [from, to] = rewritePaths.find(([from]) => ref.startsWith(from));
+                    // TODO: Support JSX preserve mode — https://www.typescriptlang.org/docs/handbook/jsx.html
+                    const rewritten = `${to}${ref.slice(from.length)}`.replace(testFileExtension, '.js');
+                    return requireFn(rewritten);
+                }
+            };
+        }
+    };
+};
18 node_modules/@ava/typescript/node_modules/escape-string-regexp/index.d.ts generated vendored Normal file
@@ -0,0 +1,18 @@
+/**
+Escape RegExp special characters.
+
+You can also use this to escape a string that is inserted into the middle of a regex, for example, into a character class.
+
+@example
+```
+import escapeStringRegexp = require('escape-string-regexp');
+
+const escapedString = escapeStringRegexp('How much $ for a 🦄?');
+//=> 'How much \\$ for a 🦄\\?'
+
+new RegExp(escapedString);
+```
+*/
+declare const escapeStringRegexp: (string: string) => string;
+
+export = escapeStringRegexp;
11 node_modules/@ava/typescript/node_modules/escape-string-regexp/index.js generated vendored Normal file
@@ -0,0 +1,11 @@
+'use strict';
+
+const matchOperatorsRegex = /[|\\{}()[\]^$+*?.-]/g;
+
+module.exports = string => {
+    if (typeof string !== 'string') {
+        throw new TypeError('Expected a string');
+    }
+
+    return string.replace(matchOperatorsRegex, '\\$&');
+};
43 node_modules/@ava/typescript/node_modules/escape-string-regexp/package.json generated vendored Normal file
@@ -0,0 +1,43 @@
+{
+  "name": "escape-string-regexp",
+  "version": "2.0.0",
+  "description": "Escape RegExp special characters",
+  "license": "MIT",
+  "repository": "sindresorhus/escape-string-regexp",
+  "author": {
+    "name": "Sindre Sorhus",
+    "email": "sindresorhus@gmail.com",
+    "url": "sindresorhus.com"
+  },
+  "maintainers": [
+    "Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)",
+    "Joshua Boy Nicolai Appelman <joshua@jbna.nl> (jbna.nl)"
+  ],
+  "engines": {
+    "node": ">=8"
+  },
+  "scripts": {
+    "test": "xo && ava && tsd"
+  },
+  "files": [
+    "index.js",
+    "index.d.ts"
+  ],
+  "keywords": [
+    "escape",
+    "regex",
+    "regexp",
+    "re",
+    "regular",
+    "expression",
+    "string",
+    "str",
+    "special",
+    "characters"
+  ],
+  "devDependencies": {
+    "ava": "^1.4.1",
+    "tsd": "^0.7.2",
+    "xo": "^0.24.0"
+  }
+}
29 node_modules/@ava/typescript/node_modules/escape-string-regexp/readme.md generated vendored Normal file
@@ -0,0 +1,29 @@
+# escape-string-regexp [](https://travis-ci.org/sindresorhus/escape-string-regexp)
+
+> Escape RegExp special characters
+
+
+## Install
+
+```
+$ npm install escape-string-regexp
+```
+
+
+## Usage
+
+```js
+const escapeStringRegexp = require('escape-string-regexp');
+
+const escapedString = escapeStringRegexp('How much $ for a 🦄?');
+//=> 'How much \\$ for a 🦄\\?'
+
+new RegExp(escapedString);
+```
+
+You can also use this to escape a string that is inserted into the middle of a regex, for example, into a character class.
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
42 node_modules/@ava/typescript/package.json generated vendored Normal file
@@ -0,0 +1,42 @@
+{
+  "name": "@ava/typescript",
+  "version": "1.1.1",
+  "description": "TypeScript provider for AVA",
+  "engines": {
+    "node": ">=10.18.0 <11 || >=12.14.0 <13 || >=13.5.0"
+  },
+  "files": [
+    "index.js"
+  ],
+  "author": "Mark Wubben (https://novemberborn.net)",
+  "repository": "avajs/typescript",
+  "license": "MIT",
+  "keywords": [
+    "ava",
+    "typescript"
+  ],
+  "scripts": {
+    "test": "xo && nyc ava"
+  },
+  "dependencies": {
+    "escape-string-regexp": "^2.0.0"
+  },
+  "devDependencies": {
+    "ava": "^3.0.0",
+    "execa": "^4.0.0",
+    "nyc": "^15.0.0",
+    "xo": "^0.25.3"
+  },
+  "nyc": {
+    "reporter": [
+      "html",
+      "lcov",
+      "text"
+    ]
+  },
+  "xo": {
+    "rules": {
+      "import/order": "off"
+    }
+  }
+}
22 node_modules/@babel/core/LICENSE generated vendored
@@ -1,22 +0,0 @@
-MIT License
-
-Copyright (c) 2014-present Sebastian McKenzie and other contributors
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19 node_modules/@babel/core/README.md generated vendored
@@ -1,19 +0,0 @@
-# @babel/core
-
-> Babel compiler core.
-
-See our website [@babel/core](https://babeljs.io/docs/en/next/babel-core.html) for more information or the [issues](https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A%20core%22+is%3Aopen) associated with this package.
-
-## Install
-
-Using npm:
-
-```sh
-npm install --save-dev @babel/core
-```
-
-or using yarn:
-
-```sh
-yarn add @babel/core --dev
-```
198 node_modules/@babel/core/lib/config/caching.js generated vendored
@@ -1,198 +0,0 @@
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.makeStrongCache = makeStrongCache;
-exports.makeWeakCache = makeWeakCache;
-exports.assertSimpleType = assertSimpleType;
-
-function makeStrongCache(handler) {
-  return makeCachedFunction(new Map(), handler);
-}
-
-function makeWeakCache(handler) {
-  return makeCachedFunction(new WeakMap(), handler);
-}
-
-function makeCachedFunction(callCache, handler) {
-  return function cachedFunction(arg, data) {
-    let cachedValue = callCache.get(arg);
-
-    if (cachedValue) {
-      for (const {
-        value,
-        valid
-      } of cachedValue) {
-        if (valid(data)) return value;
-      }
-    }
-
-    const cache = new CacheConfigurator(data);
-    const value = handler(arg, cache);
-    if (!cache.configured()) cache.forever();
-    cache.deactivate();
-
-    switch (cache.mode()) {
-      case "forever":
-        cachedValue = [{
-          value,
-          valid: () => true
-        }];
-        callCache.set(arg, cachedValue);
-        break;
-
-      case "invalidate":
-        cachedValue = [{
-          value,
-          valid: cache.validator()
-        }];
-        callCache.set(arg, cachedValue);
-        break;
-
-      case "valid":
-        if (cachedValue) {
-          cachedValue.push({
-            value,
-            valid: cache.validator()
-          });
-        } else {
-          cachedValue = [{
-            value,
-            valid: cache.validator()
-          }];
-          callCache.set(arg, cachedValue);
-        }
-
-    }
-
-    return value;
-  };
-}
-
-class CacheConfigurator {
-  constructor(data) {
-    this._active = true;
-    this._never = false;
-    this._forever = false;
-    this._invalidate = false;
-    this._configured = false;
-    this._pairs = [];
-    this._data = data;
-  }
-
-  simple() {
-    return makeSimpleConfigurator(this);
-  }
-
-  mode() {
-    if (this._never) return "never";
-    if (this._forever) return "forever";
-    if (this._invalidate) return "invalidate";
-    return "valid";
-  }
-
-  forever() {
-    if (!this._active) {
-      throw new Error("Cannot change caching after evaluation has completed.");
-    }
-
-    if (this._never) {
-      throw new Error("Caching has already been configured with .never()");
-    }
-
-    this._forever = true;
-    this._configured = true;
-  }
-
-  never() {
-    if (!this._active) {
-      throw new Error("Cannot change caching after evaluation has completed.");
-    }
-
-    if (this._forever) {
-      throw new Error("Caching has already been configured with .forever()");
-    }
-
-    this._never = true;
-    this._configured = true;
-  }
-
-  using(handler) {
-    if (!this._active) {
-      throw new Error("Cannot change caching after evaluation has completed.");
-    }
-
-    if (this._never || this._forever) {
-      throw new Error("Caching has already been configured with .never or .forever()");
-    }
-
-    this._configured = true;
-    const key = handler(this._data);
-
-    this._pairs.push([key, handler]);
-
-    return key;
-  }
-
-  invalidate(handler) {
-    if (!this._active) {
-      throw new Error("Cannot change caching after evaluation has completed.");
-    }
-
-    if (this._never || this._forever) {
-      throw new Error("Caching has already been configured with .never or .forever()");
-    }
-
-    this._invalidate = true;
-    this._configured = true;
-    const key = handler(this._data);
-
-    this._pairs.push([key, handler]);
-
-    return key;
-  }
-
-  validator() {
-    const pairs = this._pairs;
-    return data => pairs.every(([key, fn]) => key === fn(data));
-  }
-
-  deactivate() {
-    this._active = false;
-  }
-
-  configured() {
-    return this._configured;
-  }
-
-}
-
-function makeSimpleConfigurator(cache) {
-  function cacheFn(val) {
-    if (typeof val === "boolean") {
-      if (val) cache.forever();else cache.never();
-      return;
-    }
-
-    return cache.using(() => assertSimpleType(val()));
-  }
-
-  cacheFn.forever = () => cache.forever();
-
-  cacheFn.never = () => cache.never();
-
-  cacheFn.using = cb => cache.using(() => assertSimpleType(cb()));
-
-  cacheFn.invalidate = cb => cache.invalidate(() => assertSimpleType(cb()));
-
-  return cacheFn;
-}
-
-function assertSimpleType(value) {
-  if (value != null && typeof value !== "string" && typeof value !== "boolean" && typeof value !== "number") {
-    throw new Error("Cache keys must be either string, boolean, number, null, or undefined.");
-  }
-
-  return value;
-}
439  node_modules/@babel/core/lib/config/config-chain.js  generated  vendored  @@ -1,439 +0,0 @@
211  node_modules/@babel/core/lib/config/config-descriptors.js  generated  vendored  @@ -1,211 +0,0 @@
312  node_modules/@babel/core/lib/config/files/configuration.js  generated  vendored  @@ -1,312 +0,0 @@
59  node_modules/@babel/core/lib/config/files/index-browser.js  generated  vendored  @@ -1,59 +0,0 @@
67  node_modules/@babel/core/lib/config/files/index.js  generated  vendored  @@ -1,67 +0,0 @@
76  node_modules/@babel/core/lib/config/files/package.js  generated  vendored  @@ -1,76 +0,0 @@
169  node_modules/@babel/core/lib/config/files/plugins.js  generated  vendored  @@ -1,169 +0,0 @@
0  node_modules/@babel/core/lib/config/files/types.js  generated  vendored
41  node_modules/@babel/core/lib/config/files/utils.js  generated  vendored  @@ -1,41 +0,0 @@
281
node_modules/@babel/core/lib/config/full.js
generated
vendored
281
node_modules/@babel/core/lib/config/full.js
generated
vendored
@@ -1,281 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = loadFullConfig;

var _util = require("./util");

var context = _interopRequireWildcard(require("../index"));

var _plugin = _interopRequireDefault(require("./plugin"));

var _item = require("./item");

var _configChain = require("./config-chain");

function _traverse() {
  const data = _interopRequireDefault(require("@babel/traverse"));

  _traverse = function () {
    return data;
  };

  return data;
}

var _caching = require("./caching");

var _options = require("./validation/options");

var _plugins = require("./validation/plugins");

var _configApi = _interopRequireDefault(require("./helpers/config-api"));

var _partial = _interopRequireDefault(require("./partial"));

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }

function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }

function loadFullConfig(inputOpts) {
  const result = (0, _partial.default)(inputOpts);

  if (!result) {
    return null;
  }

  const {
    options,
    context
  } = result;
  const optionDefaults = {};
  const passes = [[]];

  try {
    const {
      plugins,
      presets
    } = options;

    if (!plugins || !presets) {
      throw new Error("Assertion failure - plugins and presets exist");
    }

    const ignored = function recurseDescriptors(config, pass) {
      const plugins = config.plugins.reduce((acc, descriptor) => {
        if (descriptor.options !== false) {
          acc.push(loadPluginDescriptor(descriptor, context));
        }

        return acc;
      }, []);
      const presets = config.presets.reduce((acc, descriptor) => {
        if (descriptor.options !== false) {
          acc.push({
            preset: loadPresetDescriptor(descriptor, context),
            pass: descriptor.ownPass ? [] : pass
          });
        }

        return acc;
      }, []);

      if (presets.length > 0) {
        passes.splice(1, 0, ...presets.map(o => o.pass).filter(p => p !== pass));

        for (const {
          preset,
          pass
        } of presets) {
          if (!preset) return true;
          const ignored = recurseDescriptors({
            plugins: preset.plugins,
            presets: preset.presets
          }, pass);
          if (ignored) return true;
          preset.options.forEach(opts => {
            (0, _util.mergeOptions)(optionDefaults, opts);
          });
        }
      }

      if (plugins.length > 0) {
        pass.unshift(...plugins);
      }
    }({
      plugins: plugins.map(item => {
        const desc = (0, _item.getItemDescriptor)(item);

        if (!desc) {
          throw new Error("Assertion failure - must be config item");
        }

        return desc;
      }),
      presets: presets.map(item => {
        const desc = (0, _item.getItemDescriptor)(item);

        if (!desc) {
          throw new Error("Assertion failure - must be config item");
        }

        return desc;
      })
    }, passes[0]);

    if (ignored) return null;
  } catch (e) {
    if (!/^\[BABEL\]/.test(e.message)) {
      e.message = `[BABEL] ${context.filename || "unknown"}: ${e.message}`;
    }

    throw e;
  }

  const opts = optionDefaults;
  (0, _util.mergeOptions)(opts, options);
  opts.plugins = passes[0];
  opts.presets = passes.slice(1).filter(plugins => plugins.length > 0).map(plugins => ({
    plugins
  }));
  opts.passPerPreset = opts.presets.length > 0;
  return {
    options: opts,
    passes: passes
  };
}

const loadDescriptor = (0, _caching.makeWeakCache)(({
  value,
  options,
  dirname,
  alias
}, cache) => {
  if (options === false) throw new Error("Assertion failure");
  options = options || {};
  let item = value;

  if (typeof value === "function") {
    const api = Object.assign({}, context, {}, (0, _configApi.default)(cache));

    try {
      item = value(api, options, dirname);
    } catch (e) {
      if (alias) {
        e.message += ` (While processing: ${JSON.stringify(alias)})`;
      }

      throw e;
    }
  }

  if (!item || typeof item !== "object") {
    throw new Error("Plugin/Preset did not return an object.");
  }

  if (typeof item.then === "function") {
    throw new Error(`You appear to be using an async plugin, ` + `which your current version of Babel does not support. ` + `If you're using a published plugin, ` + `you may need to upgrade your @babel/core version.`);
  }

  return {
    value: item,
    options,
    dirname,
    alias
  };
});

function loadPluginDescriptor(descriptor, context) {
  if (descriptor.value instanceof _plugin.default) {
    if (descriptor.options) {
      throw new Error("Passed options to an existing Plugin instance will not work.");
    }

    return descriptor.value;
  }

  return instantiatePlugin(loadDescriptor(descriptor, context), context);
}

const instantiatePlugin = (0, _caching.makeWeakCache)(({
  value,
  options,
  dirname,
  alias
}, cache) => {
  const pluginObj = (0, _plugins.validatePluginObject)(value);
  const plugin = Object.assign({}, pluginObj);

  if (plugin.visitor) {
    plugin.visitor = _traverse().default.explode(Object.assign({}, plugin.visitor));
  }

  if (plugin.inherits) {
    const inheritsDescriptor = {
      name: undefined,
      alias: `${alias}$inherits`,
      value: plugin.inherits,
      options,
      dirname
    };
    const inherits = cache.invalidate(data => loadPluginDescriptor(inheritsDescriptor, data));
    plugin.pre = chain(inherits.pre, plugin.pre);
    plugin.post = chain(inherits.post, plugin.post);
    plugin.manipulateOptions = chain(inherits.manipulateOptions, plugin.manipulateOptions);
    plugin.visitor = _traverse().default.visitors.merge([inherits.visitor || {}, plugin.visitor || {}]);
  }

  return new _plugin.default(plugin, options, alias);
});

const validateIfOptionNeedsFilename = (options, descriptor) => {
  if (options.test || options.include || options.exclude) {
    const formattedPresetName = descriptor.name ? `"${descriptor.name}"` : "/* your preset */";
    throw new Error([`Preset ${formattedPresetName} requires a filename to be set when babel is called directly,`, `\`\`\``, `babel.transform(code, { filename: 'file.ts', presets: [${formattedPresetName}] });`, `\`\`\``, `See https://babeljs.io/docs/en/options#filename for more information.`].join("\n"));
  }
};

const validatePreset = (preset, context, descriptor) => {
  if (!context.filename) {
    const {
      options
    } = preset;
    validateIfOptionNeedsFilename(options, descriptor);

    if (options.overrides) {
      options.overrides.forEach(overrideOptions => validateIfOptionNeedsFilename(overrideOptions, descriptor));
    }
  }
};

const loadPresetDescriptor = (descriptor, context) => {
  const preset = instantiatePreset(loadDescriptor(descriptor, context));
  validatePreset(preset, context, descriptor);
  return (0, _configChain.buildPresetChain)(preset, context);
};

const instantiatePreset = (0, _caching.makeWeakCache)(({
  value,
  dirname,
  alias
}) => {
  return {
    options: (0, _options.validate)("preset", value),
    alias,
    dirname
  };
});

function chain(a, b) {
  const fns = [a, b].filter(Boolean);
  if (fns.length <= 1) return fns[0];
  return function (...args) {
    for (const fn of fns) {
      fn.apply(this, args);
    }
  };
}
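full.js composes inherited and own plugin hooks (pre, post, manipulateOptions) with the small chain helper at the end of the file. A short standalone illustration of how that combinator behaves, using made-up hooks:

function chain(a, b) {
  const fns = [a, b].filter(Boolean);
  if (fns.length <= 1) return fns[0];
  return function (...args) {
    // Run every hook, in order, with the same arguments and `this`.
    for (const fn of fns) {
      fn.apply(this, args);
    }
  };
}

const pre = chain(() => console.log("inherited pre"), () => console.log("own pre"));
pre(); // logs "inherited pre" then "own pre"

const only = chain(undefined, () => console.log("own pre"));
only(); // a single defined hook is returned as-is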
86
node_modules/@babel/core/lib/config/helpers/config-api.js
generated
vendored
@@ -1,86 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = makeAPI;

function _semver() {
  const data = _interopRequireDefault(require("semver"));

  _semver = function () {
    return data;
  };

  return data;
}

var _ = require("../../");

var _caching = require("../caching");

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function makeAPI(cache) {
  const env = value => cache.using(data => {
    if (typeof value === "undefined") return data.envName;

    if (typeof value === "function") {
      return (0, _caching.assertSimpleType)(value(data.envName));
    }

    if (!Array.isArray(value)) value = [value];
    return value.some(entry => {
      if (typeof entry !== "string") {
        throw new Error("Unexpected non-string value");
      }

      return entry === data.envName;
    });
  });

  const caller = cb => cache.using(data => (0, _caching.assertSimpleType)(cb(data.caller)));

  return {
    version: _.version,
    cache: cache.simple(),
    env,
    async: () => false,
    caller,
    assertVersion,
    tokTypes: undefined
  };
}

function assertVersion(range) {
  if (typeof range === "number") {
    if (!Number.isInteger(range)) {
      throw new Error("Expected string or integer value.");
    }

    range = `^${range}.0.0-0`;
  }

  if (typeof range !== "string") {
    throw new Error("Expected string or integer value.");
  }

  if (_semver().default.satisfies(_.version, range)) return;
  const limit = Error.stackTraceLimit;

  if (typeof limit === "number" && limit < 25) {
    Error.stackTraceLimit = 25;
  }

  const err = new Error(`Requires Babel "${range}", but was loaded with "${_.version}". ` + `If you are sure you have a compatible version of @babel/core, ` + `it is likely that something in your build process is loading the ` + `wrong version. Inspect the stack trace of this error to look for ` + `the first entry that doesn't mention "@babel/core" or "babel-core" ` + `to see what is calling Babel.`);

  if (typeof limit === "number") {
    Error.stackTraceLimit = limit;
  }

  throw Object.assign(err, {
    code: "BABEL_VERSION_UNSUPPORTED",
    version: _.version,
    range
  });
}
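makeAPI above builds the object that plugins and presets receive as their api argument; assertVersion accepts either a major-version number (expanded to a caret range) or a semver range string. A rough illustration of how a plugin would typically use it (hypothetical plugin module, assumes it is loaded by @babel/core):

// example-plugin.js - guards against being loaded by an incompatible @babel/core.
module.exports = function examplePlugin(api) {
  api.assertVersion(7); // shorthand for the range "^7.0.0-0"
  api.cache(true);      // the plugin's output does not depend on env or caller

  return {
    visitor: {}         // no transformations; this only shows the API handshake
  };
};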
10
node_modules/@babel/core/lib/config/helpers/environment.js
generated
vendored
@@ -1,10 +0,0 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.getEnv = getEnv;

function getEnv(defaultValue = "development") {
  return process.env.BABEL_ENV || process.env.NODE_ENV || defaultValue;
}
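getEnv resolves the active environment name, with BABEL_ENV taking precedence over NODE_ENV and both falling back to "development". A minimal standalone check of that lookup order:

function getEnv(defaultValue = "development") {
  return process.env.BABEL_ENV || process.env.NODE_ENV || defaultValue;
}

process.env.NODE_ENV = "production";
console.log(getEnv()); // "production"

process.env.BABEL_ENV = "test";
console.log(getEnv()); // "test" - BABEL_ENV wins over NODE_ENV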
Some files were not shown because too many files have changed in this diff.