Mirror of https://github.com/github/codeql-action.git, synced 2025-12-23 15:50:11 +08:00

Compare commits: aeisenberg...v2.2.6 (106 commits)
| SHA1 |
|---|
| 16964e90ba |
| 74cbab4958 |
| e12a2ecd45 |
| d47d4c8047 |
| f13b180fb8 |
| a3cf96418e |
| 0c27d0da4a |
| e4b846c482 |
| c310f094dd |
| 4366485427 |
| 8340258886 |
| 6ef6e50882 |
| eb40427b00 |
| 7806af3040 |
| abf1cea835 |
| e5ade42937 |
| 6f079be771 |
| 100bd7bbef |
| a6d3a44519 |
| 5e4af3a25d |
| e812e63bb6 |
| a589d4087e |
| 98d24e5629 |
| 903be79953 |
| 18ff14b615 |
| 36a249f5ae |
| 041757fc59 |
| 8f19113f88 |
| cf1855ae37 |
| 652709d1b9 |
| 32dc499307 |
| b742728ac2 |
| 237a258d2b |
| 5972e6d72e |
| 164027e682 |
| 736263f8fe |
| 3dde1f3512 |
| d7d7567b0e |
| 0e4e857bab |
| 08d1f21d4f |
| f3bd25eefa |
| 41f1810e52 |
| d87ad69338 |
| 8242edb8ed |
| 3095a09bb0 |
| e00cd12e3e |
| a25536bc80 |
| a2487fb969 |
| e187d074ed |
| 89c5165e5a |
| ba216f7d34 |
| 68f4f0d3bb |
| 12d9a244fa |
| 17573ee1cc |
| b6975b4b1a |
| b011dbdedf |
| 40babc141f |
| 5492b7d104 |
| 3c81243bb1 |
| e2f72f11e4 |
| 7ba5ed7eed |
| 21f3020df6 |
| b872c5adfd |
| 8775e86802 |
| a2ad80b966 |
| c4e22e9fce |
| db534af2ae |
| bbe8d375fd |
| 4369dda4ae |
| 4f08c2cf20 |
| 81644f35ff |
| 9ab6aa64a0 |
| 256973e279 |
| 59b25b480f |
| 39d8d7e78f |
| 39c954c513 |
| 8af83634ca |
| 927de483f0 |
| e4c0a1b24d |
| d3962273b3 |
| c3cb270725 |
| 2b674f7ab9 |
| 6d47a7c8b1 |
| c6ff11c1c4 |
| d3f2b2e6d2 |
| d49282c3b5 |
| c5c475188a |
| f140af5e28 |
| e0fc1c91b2 |
| b95df0b2e7 |
| 2fed02cbe2 |
| 0b2a40fa4a |
| 395ec04a8b |
| e1070bd101 |
| 3ebbd71c74 |
| 2ae6e13cc3 |
| 4664f39699 |
| b2e16761f3 |
| 592a896a53 |
| 4a6b5a54c2 |
| 436dbd9100 |
| d966969093 |
| f6d03f448d |
| 43f1a6c701 |
| 75ae065ae6 |
| 0a9e9db27f |
@@ -33,6 +33,12 @@
       "alphabetize": {"order": "asc"},
       "newlines-between": "always"
     }],
+    "max-len": ["error", {
+      "code": 120,
+      "ignoreUrls": true,
+      "ignoreStrings": true,
+      "ignoreTemplateLiterals": true
+    }],
     "no-async-foreach/no-async-foreach": "error",
     "no-console": "off",
     "no-sequences": "error",
.github/codeql/codeql-config.yml (vendored, 1 line changed)

@@ -7,6 +7,7 @@ queries:
   # we include both even though one is a superset of the
   # other, because we're testing the parsing logic and
   # that the suites exist in the codeql bundle.
+  - uses: security-experimental
   - uses: security-extended
   - uses: security-and-quality
 paths-ignore:
.github/dependabot.yml (vendored, 4 lines changed)

@@ -15,3 +15,7 @@ updates:
     directory: "/"
     schedule:
       interval: weekly
+  - package-ecosystem: github-actions
+    directory: "/.github/setup-swift/" # All subdirectories outside of "/.github/workflows" must be explicitly included.
+    schedule:
+      interval: weekly
.github/setup-swift/action.yml (vendored, 2 lines changed)

@@ -26,7 +26,7 @@ runs:
          VERSION="5.7.0"
        fi
        echo "version=$VERSION" | tee -a $GITHUB_OUTPUT
-    - uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
+    - uses: swift-actions/setup-swift@da0e3e04b5e3e15dbc3861bd835ad9f0afe56296 # Please update the corresponding SHA in the CLI's CodeQL Action Integration Test.
      if: "(runner.os != 'Windows') && (matrix.version == 'cached' || matrix.version == 'latest' || matrix.version == 'nightly-latest')"
      with:
        swift-version: "${{steps.get_swift_version.outputs.version}}"
.github/update-release-branch.py (vendored, 2 lines changed)

@@ -161,7 +161,7 @@ def update_changelog(version):
   else:
     content = EMPTY_CHANGELOG

-  newContent = content.replace('[UNRELEASED]', f'${version} - {get_today_string()}', 1)
+  newContent = content.replace('[UNRELEASED]', f'{version} - {get_today_string()}', 1)

   with open('CHANGELOG.md', 'w') as f:
     f.write(newContent)
.github/workflows/__init-with-registries.yml (generated, vendored, 42 lines changed)

@@ -25,6 +25,18 @@ jobs:
     strategy:
       matrix:
         include:
+        - os: ubuntu-latest
+          version: cached
+        - os: macos-latest
+          version: cached
+        - os: windows-latest
+          version: cached
+        - os: ubuntu-latest
+          version: latest
+        - os: macos-latest
+          version: latest
+        - os: windows-latest
+          version: latest
         - os: ubuntu-latest
           version: nightly-latest
         - os: macos-latest
@@ -75,5 +87,35 @@ jobs:
           echo "::error $CODEQL_PACK1 pack was not installed."
           exit 1
           fi
+
+    - name: Verify qlconfig.yml file was created
+      shell: bash
+      run: |
+        QLCONFIG_PATH=$RUNNER_TEMP/qlconfig.yml
+        echo "Expected qlconfig.yml file to be created at $QLCONFIG_PATH"
+        if [[ -f $QLCONFIG_PATH ]]
+        then
+          echo "qlconfig.yml file was created."
+        else
+          echo "::error qlconfig.yml file was not created."
+          exit 1
+        fi
+
+    - name: Verify contents of qlconfig.yml
+      # yq is not available on windows
+      if: runner.os != 'Windows'
+      shell: bash
+      run: |
+        QLCONFIG_PATH=$RUNNER_TEMP/qlconfig.yml
+        cat $QLCONFIG_PATH | yq -e '.registries[] | select(.url == "https://ghcr.io/v2/") | select(.packages == "*/*")'
+        if [[ $? -eq 0 ]]
+        then
+          echo "Registry was added to qlconfig.yml file."
+        else
+          echo "::error Registry was not added to qlconfig.yml file."
+          echo "Contents of qlconfig.yml file:"
+          cat $QLCONFIG_PATH
+          exit 1
+        fi
     env:
       CODEQL_ACTION_TEST_MODE: true
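The new "Verify contents of qlconfig.yml" step above uses `yq` to check that the generated registries block points package downloads at the GitHub Container Registry. For readers who prefer the repository's own language, here is a minimal TypeScript sketch of the same predicate using `js-yaml` (a dependency this project already uses); the file path and expected values are taken from the workflow above, while the function name and `QlConfig` shape are invented for illustration only.

```typescript
import * as fs from "fs";
import * as yaml from "js-yaml";

// Shape of the part of qlconfig.yml that the workflow step inspects (illustrative only).
interface QlConfig {
  registries?: Array<{ url: string; packages: string }>;
}

// True if qlconfig.yml declares the ghcr.io registry for all packages, i.e. the same
// condition as:
//   yq -e '.registries[] | select(.url == "https://ghcr.io/v2/") | select(.packages == "*/*")'
function registryIsConfigured(qlconfigPath: string): boolean {
  const config = (yaml.load(fs.readFileSync(qlconfigPath, "utf8")) ?? {}) as QlConfig;
  return (config.registries ?? []).some(
    (r) => r.url === "https://ghcr.io/v2/" && r.packages === "*/*"
  );
}

// Usage mirroring the workflow step: fail if the registry entry is missing.
// RUNNER_TEMP is only set on an Actions runner; adjust the path when running locally.
const qlconfigPath = `${process.env.RUNNER_TEMP}/qlconfig.yml`;
if (!registryIsConfigured(qlconfigPath)) {
  console.error(`Registry was not added to ${qlconfigPath}`);
  process.exit(1);
}
```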
.github/workflows/script/check-node-modules.sh (vendored, 10 lines changed)

@@ -7,13 +7,9 @@ if [ ! -z "$(git status --porcelain)" ]; then
   >&2 echo "Failed: Repo should be clean before testing!"
   exit 1
 fi
-# When updating this, make sure to update the npm version in
-# `.github/workflows/update-dependencies.yml` too.
-sudo npm install --force -g npm@9.2.0
-# Reinstall modules and then clean to remove absolute paths
-# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
-npm ci
-npm run removeNPMAbsolutePaths
+"$(dirname "$0")/update-node-modules.sh" check-only
 # Check that repo is still clean
 if [ ! -z "$(git status --porcelain)" ]; then
   # If we get a fail here then the PR needs attention
.github/workflows/script/update-node-modules.sh (new executable file, vendored, 18 lines changed)

@@ -0,0 +1,18 @@
+if [ "$1" != "update" && "$1" != "check-only" ]; then
+    >&2 echo "Failed: Invalid argument. Must be 'update' or 'check-only'"
+    exit 1
+fi
+
+sudo npm install --force -g npm@9.2.0
+
+# clean the npm cache to ensure we don't have any files owned by root
+sudo npm cache clean --force
+
+if [ "$1" = "update" ]; then
+    npm install
+fi
+
+# Reinstall modules and then clean to remove absolute paths
+# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
+npm ci
+npm run removeNPMAbsolutePaths
.github/workflows/update-dependencies.yml (vendored, 7 lines changed)

@@ -27,12 +27,7 @@ jobs:
         run: |
           git fetch origin "$BRANCH" --depth=1
           git checkout "origin/$BRANCH"
-          # When updating this, make sure to update the npm version in
-          # `.github/workflows/script/check-node-modules.sh` too.
-          sudo npm install --force -g npm@9.2.0
-          npm install
-          npm ci
-          npm run removeNPMAbsolutePaths
+          .github/workflows/script/update-node-modules.sh update
           if [ ! -z "$(git status --porcelain)" ]; then
             git config --global user.email "github-actions@github.com"
             git config --global user.name "github-actions[bot]"
CHANGELOG.md (26 lines changed)

@@ -1,6 +1,30 @@
 # CodeQL Action Changelog

-## [UNRELEASED]
+## 2.2.6 - 10 Mar 2023
+
+- Update default CodeQL bundle version to 2.12.4.
+
+## 2.2.5 - 24 Feb 2023
+
+- Update default CodeQL bundle version to 2.12.3. [#1543](https://github.com/github/codeql-action/pull/1543)
+
+## 2.2.4 - 10 Feb 2023
+
+No user facing changes.
+
+## 2.2.3 - 08 Feb 2023
+
+- Update default CodeQL bundle version to 2.12.2. [#1518](https://github.com/github/codeql-action/pull/1518)
+
+## 2.2.2 - 06 Feb 2023
+
+- Fix an issue where customers using the CodeQL Action with the [CodeQL Action sync tool](https://docs.github.com/en/enterprise-server@3.7/admin/code-security/managing-github-advanced-security-for-your-enterprise/configuring-code-scanning-for-your-appliance#configuring-codeql-analysis-on-a-server-without-internet-access) would not be able to obtain the CodeQL tools. [#1517](https://github.com/github/codeql-action/pull/1517)
+
+## 2.2.1 - 27 Jan 2023
+
+No user facing changes.
+
+## 2.2.0 - 26 Jan 2023

 - Improve stability when choosing the default version of CodeQL to use in code scanning workflow runs on Actions on GitHub.com. [#1475](https://github.com/github/codeql-action/pull/1475)
   - This change addresses customer reports of code scanning alerts on GitHub.com being closed and reopened during the rollout of new versions of CodeQL in the GitHub Actions [runner images](https://github.com/actions/runner-images).
@@ -67,12 +67,8 @@ Here are a few things you can do that will increase the likelihood of your pull
    This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into releases/v2" pull request, and bumps the patch version of the CodeQL Action.

    Approve the mergeback PR and automerge it.
-1. When the "Merge main into releases/v2" pull request is merged into the `releases/v2` branch, the "Update release branch" workflow will create a "Merge releases/v2 into releases/v1" pull request to merge the changes since the last release into the `releases/v1` release branch.
-   This ensures we keep both the `releases/v1` and `releases/v2` release branches up to date and fully supported.
-
-   Review the checklist items in the pull request description.
-   Once you've checked off all the items, approve the PR and automerge it.
-1. Once the mergeback has been merged to `main` and the "Merge releases/v2 into releases/v1" PR has been merged to `releases/v1`, the release is complete.
+   Once the mergeback has been merged to `main`, the release is complete.

 ## Keeping the PR checks up to date (admin access required)
@@ -1,6 +1,6 @@
 # CodeQL Action

-This action runs GitHub's industry-leading semantic code analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/github/codeql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.
+This action runs GitHub's industry-leading semantic code analysis engine, [CodeQL](https://codeql.github.com/), against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/github/codeql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.

 For a list of recent changes, see the CodeQL Action's [changelog](CHANGELOG.md).
lib/analyze.js (generated, 30 lines changed)

@@ -126,6 +126,7 @@ async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger)
 async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger, featureEnablement) {
     const statusReport = {};
     const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
+    const queryFlags = [memoryFlag, threadsFlag];
     await util.logCodeScanningConfigInCli(codeql, featureEnablement, logger);
     for (const language of config.languages) {
         const queries = config.queries[language];
@@ -140,7 +141,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
             // another to interpret the results.
             logger.startGroup(`Running queries for ${language}`);
             const startTimeBuiltIn = new Date().getTime();
-            await runQueryGroup(language, "all", undefined, undefined);
+            await runQueryGroup(language, "all", undefined, undefined, true);
             // TODO should not be using `builtin` here. We should be using `all` instead.
             // The status report does not support `all` yet.
             statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
@@ -164,24 +165,29 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
                 !hasPackWithCustomQueries) {
                 throw new Error(`Unable to analyze ${language} as no queries were selected for this language`);
             }
+            const customQueryIndices = [];
+            for (let i = 0; i < queries.custom.length; ++i) {
+                if (queries.custom[i].queries.length > 0) {
+                    customQueryIndices.push(i);
+                }
+            }
             logger.startGroup(`Running queries for ${language}`);
             const querySuitePaths = [];
-            if (queries["builtin"].length > 0) {
+            if (queries.builtin.length > 0) {
                 const startTimeBuiltIn = new Date().getTime();
-                querySuitePaths.push((await runQueryGroup(language, "builtin", createQuerySuiteContents(queries["builtin"], queryFilters), undefined)));
+                querySuitePaths.push((await runQueryGroup(language, "builtin", createQuerySuiteContents(queries.builtin, queryFilters), undefined, customQueryIndices.length === 0 && packsWithVersion.length === 0)));
                 statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
                     new Date().getTime() - startTimeBuiltIn;
             }
             const startTimeCustom = new Date().getTime();
             let ranCustom = false;
-            for (let i = 0; i < queries["custom"].length; ++i) {
-                if (queries["custom"][i].queries.length > 0) {
-                    querySuitePaths.push((await runQueryGroup(language, `custom-${i}`, createQuerySuiteContents(queries["custom"][i].queries, queryFilters), queries["custom"][i].searchPath)));
+            for (const i of customQueryIndices) {
+                querySuitePaths.push((await runQueryGroup(language, `custom-${i}`, createQuerySuiteContents(queries.custom[i].queries, queryFilters), queries.custom[i].searchPath, i === customQueryIndices[customQueryIndices.length - 1] &&
+                    packsWithVersion.length === 0)));
                 ranCustom = true;
             }
-            }
             if (packsWithVersion.length > 0) {
-                querySuitePaths.push(await runQueryPacks(language, "packs", packsWithVersion, queryFilters));
+                querySuitePaths.push(await runQueryPacks(language, "packs", packsWithVersion, queryFilters, true));
                 ranCustom = true;
             }
             if (ranCustom) {
@@ -218,7 +224,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
         const databasePath = util.getCodeQLDatabasePath(config, language);
         return await codeql.databasePrintBaseline(databasePath);
     }
-    async function runQueryGroup(language, type, querySuiteContents, searchPath) {
+    async function runQueryGroup(language, type, querySuiteContents, searchPath, optimizeForLastQueryRun) {
         const databasePath = util.getCodeQLDatabasePath(config, language);
         // Pass the queries to codeql using a file instead of using the command
         // line to avoid command line length restrictions, particularly on windows.
@@ -229,11 +235,11 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
             fs.writeFileSync(querySuitePath, querySuiteContents);
             logger.debug(`Query suite file for ${language}-${type}...\n${querySuiteContents}`);
         }
-        await codeql.databaseRunQueries(databasePath, searchPath, querySuitePath, memoryFlag, threadsFlag);
+        await codeql.databaseRunQueries(databasePath, searchPath, querySuitePath, queryFlags, optimizeForLastQueryRun);
         logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
         return querySuitePath;
     }
-    async function runQueryPacks(language, type, packs, queryFilters) {
+    async function runQueryPacks(language, type, packs, queryFilters, optimizeForLastQueryRun) {
        const databasePath = util.getCodeQLDatabasePath(config, language);
        for (const pack of packs) {
            logger.debug(`Running query pack for ${language}-${type}: ${pack}`);
@@ -243,7 +249,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
        const querySuitePath = `${databasePath}-queries-${type}.qls`;
        fs.writeFileSync(querySuitePath, yaml.dump(querySuite));
        logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
-        await codeql.databaseRunQueries(databasePath, undefined, querySuitePath, memoryFlag, threadsFlag);
+        await codeql.databaseRunQueries(databasePath, undefined, querySuitePath, queryFlags, optimizeForLastQueryRun);
        return querySuitePath;
    }
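The thread running through the lib/analyze.js hunks above is a new last argument, `optimizeForLastQueryRun`, that is true only for the final `database run-queries` invocation of each language. The TypeScript sketch below is an illustrative restatement of that selection logic; the helper name and the `QueryPlan` shape are invented for the example, while the boolean conditions come from the diff.

```typescript
// Which of a language's query runs is the "last" one, and therefore allowed to ask
// CodeQL to discard its evaluator cache afterwards?
interface QueryPlan {
  hasBuiltin: boolean;      // queries.builtin is non-empty
  customIndices: number[];  // indices of custom query groups that contain queries
  packCount: number;        // number of query packs for this language
}

interface OptimizeFlags {
  builtin?: boolean;
  custom: boolean[];
  packs?: boolean;
}

function optimizeFlagsFor(plan: QueryPlan): OptimizeFlags {
  const lastCustom = plan.customIndices[plan.customIndices.length - 1];
  return {
    // Built-in queries run first, so they are "last" only if nothing follows them.
    builtin: plan.hasBuiltin
      ? plan.customIndices.length === 0 && plan.packCount === 0
      : undefined,
    // A custom group is "last" only if it is the final non-empty group and no packs follow.
    custom: plan.customIndices.map((i) => i === lastCustom && plan.packCount === 0),
    // Query packs always run after builtin and custom queries, so they always get the flag.
    packs: plan.packCount > 0 ? true : undefined,
  };
}

// Example: built-in queries, two custom groups, and one pack. Only the pack run is "last",
// which is the per-language pattern checked by the tests added in lib/analyze.test.js below.
console.log(optimizeFlagsFor({ hasBuiltin: true, customIndices: [0, 1], packCount: 1 }));
// => { builtin: false, custom: [ false, false ], packs: true }
```

In the CLI-config path (the single `runQueryGroup(language, "all", ...)` call), there is only one run per language, so the flag is simply passed as `true`.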
File diff suppressed because one or more lines are too long
lib/analyze.test.js (generated, 122 lines changed)

@@ -30,8 +30,10 @@ const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const ava_1 = __importDefault(require("ava"));
 const yaml = __importStar(require("js-yaml"));
+const sinon = __importStar(require("sinon"));
 const analyze_1 = require("./analyze");
 const codeql_1 = require("./codeql");
+const feature_flags_1 = require("./feature-flags");
 const languages_1 = require("./languages");
 const logging_1 = require("./logging");
 const testing_utils_1 = require("./testing-utils");
@@ -188,6 +190,126 @@ const util = __importStar(require("./util"));
         }
     }
 });
+function mockCodeQL() {
+    return {
+        getVersion: async () => "2.12.2",
+        databaseRunQueries: sinon.spy(),
+        databaseInterpretResults: async () => "",
+        databasePrintBaseline: async () => "",
+    };
+}
+function createBaseConfig(tmpDir) {
+    return {
+        languages: [],
+        queries: {},
+        pathsIgnore: [],
+        paths: [],
+        originalUserInput: {},
+        tempDir: "tempDir",
+        codeQLCmd: "",
+        gitHubVersion: {
+            type: util.GitHubVariant.DOTCOM,
+        },
+        dbLocation: path.resolve(tmpDir, "codeql_databases"),
+        packs: {},
+        debugMode: false,
+        debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
+        debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
+        augmentationProperties: {
+            injectedMlQueries: false,
+            packsInputCombines: false,
+            queriesInputCombines: false,
+        },
+        trapCaches: {},
+        trapCacheDownloadTime: 0,
+    };
+}
+function createQueryConfig(builtin, custom) {
+    return {
+        builtin,
+        custom: custom.map((c) => ({ searchPath: "/search", queries: [c] })),
+    };
+}
+async function runQueriesWithConfig(config, features) {
+    for (const language of config.languages) {
+        fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
+            recursive: true,
+        });
+    }
+    return (0, analyze_1.runQueries)("sarif-folder", "--memFlag", "--addSnippetsFlag", "--threadsFlag", undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)(features));
+}
+function getDatabaseRunQueriesCalls(mock) {
+    return mock.databaseRunQueries.getCalls();
+}
+(0, ava_1.default)("optimizeForLastQueryRun for one language", async (t) => {
+    return await util.withTmpDir(async (tmpDir) => {
+        const codeql = mockCodeQL();
+        (0, codeql_1.setCodeQL)(codeql);
+        const config = createBaseConfig(tmpDir);
+        config.languages = [languages_1.Language.cpp];
+        config.queries.cpp = createQueryConfig(["foo.ql"], []);
+        await runQueriesWithConfig(config, []);
+        t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [true]);
+    });
+});
+(0, ava_1.default)("optimizeForLastQueryRun for two languages", async (t) => {
+    return await util.withTmpDir(async (tmpDir) => {
+        const codeql = mockCodeQL();
+        (0, codeql_1.setCodeQL)(codeql);
+        const config = createBaseConfig(tmpDir);
+        config.languages = [languages_1.Language.cpp, languages_1.Language.java];
+        config.queries.cpp = createQueryConfig(["foo.ql"], []);
+        config.queries.java = createQueryConfig(["bar.ql"], []);
+        await runQueriesWithConfig(config, []);
+        t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [true, true]);
+    });
+});
+(0, ava_1.default)("optimizeForLastQueryRun for two languages, with custom queries", async (t) => {
+    return await util.withTmpDir(async (tmpDir) => {
+        const codeql = mockCodeQL();
+        (0, codeql_1.setCodeQL)(codeql);
+        const config = createBaseConfig(tmpDir);
+        config.languages = [languages_1.Language.cpp, languages_1.Language.java];
+        config.queries.cpp = createQueryConfig(["foo.ql"], ["c1.ql", "c2.ql"]);
+        config.queries.java = createQueryConfig(["bar.ql"], ["c3.ql"]);
+        await runQueriesWithConfig(config, []);
+        t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [false, false, true, false, true]);
+    });
+});
+(0, ava_1.default)("optimizeForLastQueryRun for two languages, with custom queries and packs", async (t) => {
+    return await util.withTmpDir(async (tmpDir) => {
+        const codeql = mockCodeQL();
+        (0, codeql_1.setCodeQL)(codeql);
+        const config = createBaseConfig(tmpDir);
+        config.languages = [languages_1.Language.cpp, languages_1.Language.java];
+        config.queries.cpp = createQueryConfig(["foo.ql"], ["c1.ql", "c2.ql"]);
+        config.queries.java = createQueryConfig(["bar.ql"], ["c3.ql"]);
+        config.packs.cpp = ["a/cpp-pack1@0.1.0"];
+        config.packs.java = ["b/java-pack1@0.2.0", "b/java-pack2@0.3.3"];
+        await runQueriesWithConfig(config, []);
+        t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [false, false, false, true, false, false, true]);
+    });
+});
+(0, ava_1.default)("optimizeForLastQueryRun for one language, CliConfigFileEnabled", async (t) => {
+    return await util.withTmpDir(async (tmpDir) => {
+        const codeql = mockCodeQL();
+        (0, codeql_1.setCodeQL)(codeql);
+        const config = createBaseConfig(tmpDir);
+        config.languages = [languages_1.Language.cpp];
+        await runQueriesWithConfig(config, [feature_flags_1.Feature.CliConfigFileEnabled]);
+        t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [true]);
+    });
+});
+(0, ava_1.default)("optimizeForLastQueryRun for two languages, CliConfigFileEnabled", async (t) => {
+    return await util.withTmpDir(async (tmpDir) => {
+        const codeql = mockCodeQL();
+        (0, codeql_1.setCodeQL)(codeql);
+        const config = createBaseConfig(tmpDir);
+        config.languages = [languages_1.Language.cpp, languages_1.Language.java];
+        await runQueriesWithConfig(config, [feature_flags_1.Feature.CliConfigFileEnabled]);
+        t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [true, true]);
+    });
+});
 (0, ava_1.default)("validateQueryFilters", (t) => {
     t.notThrows(() => (0, analyze_1.validateQueryFilters)([]));
     t.notThrows(() => (0, analyze_1.validateQueryFilters)(undefined));
File diff suppressed because one or more lines are too long
lib/codeql.js (generated, 46 lines changed)

@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getExtraOptions = exports.getCodeQLForCmd = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.setupCodeQL = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CommandInvocationError = void 0;
+exports.getExtraOptions = exports.getCodeQLForCmd = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.setupCodeQL = exports.CODEQL_VERSION_INIT_WITH_QLCONFIG = exports.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CommandInvocationError = void 0;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
@@ -94,6 +94,14 @@ exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = "2.9.0";
  * --extractor-options-verbosity that we need.
  */
 exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = "2.10.3";
+/**
+ * Versions 2.11.1+ of the CodeQL Bundle include a `security-experimental` built-in query suite for each language.
+ */
+exports.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE = "2.12.1";
+/**
+ * Versions 2.12.4+ of the CodeQL CLI support the `--qlconfig-file` flag in calls to `database init`.
+ */
+exports.CODEQL_VERSION_INIT_WITH_QLCONFIG = "2.12.4";
 /**
  * Set up CodeQL CLI access.
  *
@@ -101,16 +109,15 @@ exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = "2.10.3";
  * @param apiDetails
  * @param tempDir
  * @param variant
- * @param bypassToolcache
  * @param defaultCliVersion
  * @param logger
  * @param checkVersion Whether to check that CodeQL CLI meets the minimum
  *        version requirement. Must be set to true outside tests.
  * @returns a { CodeQL, toolsVersion } object.
  */
-async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, bypassToolcache, defaultCliVersion, logger, checkVersion) {
+async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger, checkVersion) {
     try {
-        const { codeqlFolder, toolsDownloadDurationMs, toolsSource, toolsVersion } = await setupCodeql.setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, bypassToolcache, defaultCliVersion, logger);
+        const { codeqlFolder, toolsDownloadDurationMs, toolsSource, toolsVersion } = await setupCodeql.setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger);
         let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
         if (process.platform === "win32") {
             codeqlCmd += ".exe";
@@ -299,7 +306,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
                 ...getExtraOptionsFromEnv(["database", "init"]),
             ]);
         },
-        async databaseInitCluster(config, sourceRoot, processName, featureEnablement, logger) {
+        async databaseInitCluster(config, sourceRoot, processName, featureEnablement, qlconfigFile, logger) {
             const extraArgs = config.languages.map((language) => `--language=${language}`);
             if (config.languages.filter((l) => (0, languages_1.isTracedLanguage)(l)).length > 0) {
                 extraArgs.push("--begin-tracing");
@@ -317,17 +324,21 @@
                     extraArgs.push("--no-internal-use-lua-tracing");
                 }
             }
-            // A config file is only generated if the CliConfigFileEnabled feature flag is enabled.
-            const configLocation = await generateCodeScanningConfig(codeql, config, featureEnablement, logger);
+            // A code scanning config file is only generated if the CliConfigFileEnabled feature flag is enabled.
+            const codeScanningConfigFile = await generateCodeScanningConfig(codeql, config, featureEnablement, logger);
             // Only pass external repository token if a config file is going to be parsed by the CLI.
             let externalRepositoryToken;
-            if (configLocation) {
-                extraArgs.push(`--codescanning-config=${configLocation}`);
+            if (codeScanningConfigFile) {
                 externalRepositoryToken = (0, actions_util_1.getOptionalInput)("external-repository-token");
+                extraArgs.push(`--codescanning-config=${codeScanningConfigFile}`);
                 if (externalRepositoryToken) {
                     extraArgs.push("--external-repository-token-stdin");
                 }
             }
+            if (qlconfigFile !== undefined &&
+                (await util.codeQlVersionAbove(this, exports.CODEQL_VERSION_INIT_WITH_QLCONFIG))) {
+                extraArgs.push(`--qlconfig-file=${qlconfigFile}`);
+            }
             await runTool(cmd, [
                 "database",
                 "init",
@@ -469,17 +480,20 @@ async function getCodeQLForCmd(cmd, checkVersion) {
                 throw new Error(`Unexpected output from codeql resolve queries: ${e}`);
             }
         },
-        async databaseRunQueries(databasePath, extraSearchPath, querySuitePath, memoryFlag, threadsFlag) {
+        async databaseRunQueries(databasePath, extraSearchPath, querySuitePath, flags, optimizeForLastQueryRun) {
             const codeqlArgs = [
                 "database",
                 "run-queries",
-                memoryFlag,
-                threadsFlag,
+                ...flags,
                 databasePath,
                 "--min-disk-free=1024",
                 "-v",
                 ...getExtraOptionsFromEnv(["database", "run-queries"]),
             ];
+            if (optimizeForLastQueryRun &&
+                (await util.supportExpectDiscardedCache(this))) {
+                codeqlArgs.push("--expect-discarded-cache");
+            }
             if (extraSearchPath !== undefined) {
                 codeqlArgs.push("--additional-packs", extraSearchPath);
             }
@@ -712,7 +726,7 @@ async function generateCodeScanningConfig(codeql, config, featureEnablement, log
     if (!(await util.useCodeScanningConfigInCli(codeql, featureEnablement))) {
         return;
     }
-    const configLocation = path.resolve(config.tempDir, "user-config.yaml");
+    const codeScanningConfigFile = path.resolve(config.tempDir, "user-config.yaml");
     // make a copy so we can modify it
     const augmentedConfig = cloneObject(config.originalUserInput);
     // Inject the queries from the input
@@ -766,12 +780,12 @@
             augmentedConfig.packs["javascript"].push(packString);
         }
     }
-    logger.info(`Writing augmented user configuration file to ${configLocation}`);
+    logger.info(`Writing augmented user configuration file to ${codeScanningConfigFile}`);
     logger.startGroup("Augmented user configuration file contents");
     logger.info(yaml.dump(augmentedConfig));
     logger.endGroup();
-    fs.writeFileSync(configLocation, yaml.dump(augmentedConfig));
-    return configLocation;
+    fs.writeFileSync(codeScanningConfigFile, yaml.dump(augmentedConfig));
+    return codeScanningConfigFile;
 }
 function cloneObject(obj) {
     return JSON.parse(JSON.stringify(obj));
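Two of the lib/codeql.js hunks above gate new CLI flags: `--qlconfig-file` is only passed when the CLI is at least 2.12.4 (`CODEQL_VERSION_INIT_WITH_QLCONFIG`), and `--expect-discarded-cache` is only appended when the caller marks the run as the last one and the CLI supports it. The TypeScript sketch below restates that pattern using the `semver` package the project already depends on; the helper names and their signatures are illustrative, not the action's real API.

```typescript
import * as semver from "semver";

const CODEQL_VERSION_INIT_WITH_QLCONFIG = "2.12.4";

// Extra `database init` arguments: add --qlconfig-file only when a qlconfig path was
// supplied and the resolved CLI version is new enough to accept the flag.
function qlconfigArgs(cliVersion: string, qlconfigFile?: string): string[] {
  if (qlconfigFile !== undefined && semver.gte(cliVersion, CODEQL_VERSION_INIT_WITH_QLCONFIG)) {
    return [`--qlconfig-file=${qlconfigFile}`];
  }
  return [];
}

// `database run-queries` arguments: the memory/threads options now arrive as one flags
// array, and --expect-discarded-cache is appended only for the final query run.
function runQueriesArgs(
  databasePath: string,
  flags: string[], // e.g. ["--ram=4096", "--threads=2"]
  optimizeForLastQueryRun: boolean,
  cliSupportsExpectDiscardedCache: boolean
): string[] {
  const args = ["database", "run-queries", ...flags, databasePath, "--min-disk-free=1024", "-v"];
  if (optimizeForLastQueryRun && cliSupportsExpectDiscardedCache) {
    args.push("--expect-discarded-cache");
  }
  return args;
}

console.log(qlconfigArgs("2.12.4", "/tmp/qlconfig.yml")); // ["--qlconfig-file=/tmp/qlconfig.yml"]
console.log(qlconfigArgs("2.12.3", "/tmp/qlconfig.yml")); // []
console.log(runQueriesArgs("/db/cpp", ["--ram=4096", "--threads=2"], true, true));
```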
File diff suppressed because one or more lines are too long
lib/codeql.test.js (generated, 205 lines changed)

@@ -97,7 +97,7 @@ ava_1.default.beforeEach(() => {
  * @returns the download URL for the bundle. This can be passed to the tools parameter of
  * `codeql.setupCodeQL`.
  */
-function mockDownloadApi({ apiDetails = sampleApiDetails, isPinned, tagName, }) {
+function mockDownloadApi({ apiDetails = sampleApiDetails, isPinned, repo = "github/codeql-action", platformSpecific = true, tagName, }) {
     const platform = process.platform === "win32"
         ? "win64"
         : process.platform === "linux"
@@ -105,7 +105,7 @@ function mockDownloadApi({ apiDetails = sampleApiDetails, isPinned, tagName, })
             : "osx64";
     const baseUrl = apiDetails?.url ?? "https://example.com";
     const relativeUrl = apiDetails
-        ? `/github/codeql-action/releases/download/${tagName}/codeql-bundle-${platform}.tar.gz`
+        ? `/${repo}/releases/download/${tagName}/codeql-bundle${platformSpecific ? `-${platform}` : ""}.tar.gz`
         : `/download/${tagName}/codeql-bundle.tar.gz`;
     (0, nock_1.default)(baseUrl)
         .get(relativeUrl)
@@ -114,7 +114,7 @@ function mockDownloadApi({ apiDetails = sampleApiDetails, isPinned, tagName, })
 }
 async function installIntoToolcache({ apiDetails = sampleApiDetails, cliVersion, isPinned, tagName, tmpDir, }) {
     const url = mockDownloadApi({ apiDetails, isPinned, tagName });
-    await codeql.setupCodeQL(cliVersion !== undefined ? undefined : url, apiDetails, tmpDir, util.GitHubVariant.GHES, false, cliVersion !== undefined
+    await codeql.setupCodeQL(cliVersion !== undefined ? undefined : url, apiDetails, tmpDir, util.GitHubVariant.GHES, cliVersion !== undefined
         ? { cliVersion, tagName, variant: util.GitHubVariant.GHES }
         : SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
 }
@@ -153,11 +153,11 @@ function mockApiDetails(apiDetails) {
                 tagName: `codeql-bundle-${version}`,
                 isPinned: false,
             });
-            const result = await codeql.setupCodeQL(url, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
+            const result = await codeql.setupCodeQL(url, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
             t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
             t.is(result.toolsVersion, `0.0.0-${version}`);
             t.is(result.toolsSource, init_1.ToolsSource.Download);
-            t.is(typeof result.toolsDownloadDurationMs, "number");
+            t.assert(Number.isInteger(result.toolsDownloadDurationMs));
         }
         t.is(toolcache.findAllVersions("CodeQL").length, 2);
     });
@@ -173,11 +173,11 @@ function mockApiDetails(apiDetails) {
         const url = mockDownloadApi({
             tagName: "codeql-bundle-20200610",
         });
-        const result = await codeql.setupCodeQL(url, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
+        const result = await codeql.setupCodeQL(url, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
         t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
         t.deepEqual(result.toolsVersion, "0.0.0-20200610");
         t.is(result.toolsSource, init_1.ToolsSource.Download);
-        t.not(result.toolsDownloadDurationMs, undefined);
+        t.assert(Number.isInteger(result.toolsDownloadDurationMs));
     });
 });
 const EXPLICITLY_REQUESTED_BUNDLE_TEST_CASES = [
@@ -207,75 +207,59 @@ for (const { cliVersion, expectedToolcacheVersion, } of EXPLICITLY_REQUESTED_BUN
         const url = mockDownloadApi({
             tagName: "codeql-bundle-20200610",
         });
-        const result = await codeql.setupCodeQL(url, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
+        const result = await codeql.setupCodeQL(url, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
         t.assert(releaseApiMock.isDone(), "Releases API should have been called");
         t.assert(toolcache.find("CodeQL", expectedToolcacheVersion));
         t.deepEqual(result.toolsVersion, cliVersion);
         t.is(result.toolsSource, init_1.ToolsSource.Download);
-        t.not(result.toolsDownloadDurationMs, undefined);
+        t.assert(Number.isInteger(result.toolsDownloadDurationMs));
     });
 });
 }
-for (const { isCached, tagName, toolcacheCliVersion } of [
+for (const { githubReleases, toolcacheVersion } of [
+    // Test that we use the tools from the toolcache when `SAMPLE_DEFAULT_CLI_VERSION` is requested
+    // and `SAMPLE_DEFAULT_CLI_VERSION-` is in the toolcache.
     {
-        isCached: true,
-        tagName: "codeql-bundle-20230101",
-        toolcacheCliVersion: SAMPLE_DEFAULT_CLI_VERSION.cliVersion,
+        toolcacheVersion: SAMPLE_DEFAULT_CLI_VERSION.cliVersion,
     },
     {
-        isCached: true,
-        // By leaving toolcacheCliVersion undefined, the bundle will be installed
-        // into the toolcache as `${SAMPLE_DEFAULT_CLI_VERSION.cliVersion}-20230101`.
-        // This lets us test that `x.y.z-yyyymmdd` toolcache versions are used if an
-        // `x.y.z` version isn't in the toolcache.
-        tagName: `codeql-bundle-${SAMPLE_DEFAULT_CLI_VERSION.cliVersion}-20230101`,
+        githubReleases: {
+            "codeql-bundle-20230101": `cli-version-${SAMPLE_DEFAULT_CLI_VERSION.cliVersion}.txt`,
+        },
+        toolcacheVersion: "0.0.0-20230101",
     },
     {
-        isCached: false,
-        tagName: "codeql-bundle-20230101",
+        toolcacheVersion: `${SAMPLE_DEFAULT_CLI_VERSION.cliVersion}-20230101`,
     },
 ]) {
-    (0, ava_1.default)(`uses default version on Dotcom when default version bundle ${tagName} is ${isCached ? "" : "not "}cached`, async (t) => {
+    (0, ava_1.default)(`uses tools from toolcache when ${SAMPLE_DEFAULT_CLI_VERSION.cliVersion} is requested and ` +
+        `${toolcacheVersion} is installed`, async (t) => {
         await util.withTmpDir(async (tmpDir) => {
             (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-            if (isCached) {
-                await installIntoToolcache({
-                    cliVersion: toolcacheCliVersion,
-                    tagName,
-                    isPinned: true,
-                    tmpDir,
-                });
-            }
-            else {
-                mockDownloadApi({
-                    tagName,
-                });
+            sinon
+                .stub(toolcache, "find")
+                .withArgs("CodeQL", toolcacheVersion)
+                .returns("path/to/cached/codeql");
+            sinon.stub(toolcache, "findAllVersions").returns([toolcacheVersion]);
+            if (githubReleases) {
                 sinon.stub(api, "getApiClient").value(() => ({
                     repos: {
                         listReleases: sinon.stub().resolves(undefined),
                     },
-                    paginate: sinon.stub().resolves([
-                        {
+                    paginate: sinon.stub().resolves(Object.entries(githubReleases).map(([releaseTagName, cliVersionMarkerFile]) => ({
                         assets: [
                             {
-                                name: "cli-version-2.0.0.txt",
+                                name: cliVersionMarkerFile,
                             },
                         ],
-                        tag_name: tagName,
-                        },
-                    ]),
+                        tag_name: releaseTagName,
+                    }))),
                 }));
             }
-            const result = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
+            const result = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
             t.is(result.toolsVersion, SAMPLE_DEFAULT_CLI_VERSION.cliVersion);
-            if (isCached) {
             t.is(result.toolsSource, init_1.ToolsSource.Toolcache);
             t.is(result.toolsDownloadDurationMs, undefined);
-            }
-            else {
-                t.is(result.toolsSource, init_1.ToolsSource.Download);
-                t.is(typeof result.toolsDownloadDurationMs, "number");
-            }
         });
     });
 }
@@ -288,7 +272,7 @@ for (const variant of [util.GitHubVariant.GHAE, util.GitHubVariant.GHES]) {
             isPinned: true,
             tmpDir,
         });
-        const result = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, variant, false, {
+        const result = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, variant, {
             cliVersion: defaults.cliVersion,
             tagName: defaults.bundleVersion,
             variant,
@@ -311,14 +295,14 @@
         mockDownloadApi({
             tagName: defaults.bundleVersion,
         });
-        const result = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, variant, false, {
+        const result = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, variant, {
            cliVersion: defaults.cliVersion,
            tagName: defaults.bundleVersion,
            variant,
        }, (0, logging_1.getRunnerLogger)(true), false);
        t.deepEqual(result.toolsVersion, defaults.cliVersion);
        t.is(result.toolsSource, init_1.ToolsSource.Download);
-        t.is(typeof result.toolsDownloadDurationMs, "number");
+        t.assert(Number.isInteger(result.toolsDownloadDurationMs));
        const cachedVersions = toolcache.findAllVersions("CodeQL");
        t.is(cachedVersions.length, 2);
    });
@@ -335,15 +319,19 @@
         mockDownloadApi({
             tagName: defaults.bundleVersion,
         });
-        const result = await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
+        const result = await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
        t.deepEqual(result.toolsVersion, defaults.cliVersion);
        t.is(result.toolsSource, init_1.ToolsSource.Download);
-        t.is(typeof result.toolsDownloadDurationMs, "number");
+        t.assert(Number.isInteger(result.toolsDownloadDurationMs));
        const cachedVersions = toolcache.findAllVersions("CodeQL");
        t.is(cachedVersions.length, 2);
    });
 });
-(0, ava_1.default)("download codeql bundle from github ae endpoint", async (t) => {
+for (const isBundleVersionInUrl of [true, false]) {
+    const inclusionString = isBundleVersionInUrl
+        ? "includes"
+        : "does not include";
+    (0, ava_1.default)(`download codeql bundle from github ae endpoint (URL ${inclusionString} bundle version)`, async (t) => {
     await util.withTmpDir(async (tmpDir) => {
         (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
         const bundleAssetID = 10;
@@ -353,6 +341,9 @@
             ? "linux64"
             : "osx64";
         const codeQLBundleName = `codeql-bundle-${platform}.tar.gz`;
+        const eventualDownloadUrl = isBundleVersionInUrl
+            ? `https://example.githubenterprise.com/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`
+            : `https://example.githubenterprise.com/api/v3/repos/github/codeql-action/releases/assets/${bundleAssetID}`;
         (0, nock_1.default)("https://example.githubenterprise.com")
             .get(`/api/v3/enterprise/code-scanning/codeql-bundle/find/${defaults.bundleVersion}`)
             .reply(200, {
@@ -361,25 +352,50 @@
         (0, nock_1.default)("https://example.githubenterprise.com")
             .get(`/api/v3/enterprise/code-scanning/codeql-bundle/download/${bundleAssetID}`)
             .reply(200, {
-            url: `https://example.githubenterprise.com/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`,
+            url: eventualDownloadUrl,
         });
         (0, nock_1.default)("https://example.githubenterprise.com")
-            .get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`)
+            .get(eventualDownloadUrl.replace("https://example.githubenterprise.com", ""))
             .replyWithFile(200, path_1.default.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
         mockApiDetails(sampleGHAEApiDetails);
         sinon.stub(actionsUtil, "isRunningLocalAction").returns(false);
         process.env["GITHUB_ACTION_REPOSITORY"] = "github/codeql-action";
-        const result = await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, util.GitHubVariant.GHAE, false, {
+        const result = await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, util.GitHubVariant.GHAE, {
             cliVersion: defaults.cliVersion,
             tagName: defaults.bundleVersion,
             variant: util.GitHubVariant.GHAE,
         }, (0, logging_1.getRunnerLogger)(true), false);
         t.is(result.toolsSource, init_1.ToolsSource.Download);
-        t.is(typeof result.toolsDownloadDurationMs, "number");
+        t.assert(Number.isInteger(result.toolsDownloadDurationMs));
         const cachedVersions = toolcache.findAllVersions("CodeQL");
         t.is(cachedVersions.length, 1);
     });
 });
+}
+(0, ava_1.default)("bundle URL from another repo is cached as 0.0.0-bundleVersion", async (t) => {
+    await util.withTmpDir(async (tmpDir) => {
+        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
+        mockApiDetails(sampleApiDetails);
+        sinon.stub(actionsUtil, "isRunningLocalAction").returns(true);
+        const releasesApiMock = mockReleaseApi({
+            assetNames: ["cli-version-2.12.2.txt"],
+            tagName: "codeql-bundle-20230203",
|
||||||
|
});
|
||||||
|
mockDownloadApi({
|
||||||
|
repo: "dsp-testing/codeql-cli-nightlies",
|
||||||
|
platformSpecific: false,
|
||||||
|
tagName: "codeql-bundle-20230203",
|
||||||
|
});
|
||||||
|
const result = await codeql.setupCodeQL("https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-20230203/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
|
t.is(result.toolsVersion, "0.0.0-20230203");
|
||||||
|
t.is(result.toolsSource, init_1.ToolsSource.Download);
|
||||||
|
t.true(Number.isInteger(result.toolsDownloadDurationMs));
|
||||||
|
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||||
|
t.is(cachedVersions.length, 1);
|
||||||
|
t.is(cachedVersions[0], "0.0.0-20230203");
|
||||||
|
t.false(releasesApiMock.isDone());
|
||||||
|
});
|
||||||
|
});
|
||||||
(0, ava_1.default)("getExtraOptions works for explicit paths", (t) => {
|
(0, ava_1.default)("getExtraOptions works for explicit paths", (t) => {
|
||||||
t.deepEqual(codeql.getExtraOptions({}, ["foo"], []), []);
|
t.deepEqual(codeql.getExtraOptions({}, ["foo"], []), []);
|
||||||
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ["foo"], []), ["42"]);
|
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ["foo"], []), ["42"]);
|
||||||
@@ -436,11 +452,11 @@ for (const variant of [util.GitHubVariant.GHAE, util.GitHubVariant.GHES]) {
|
|||||||
packsInputCombines: false,
|
packsInputCombines: false,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, (0, testing_utils_1.createFeatures)([]), "/path/to/qlconfig.yml", (0, logging_1.getRunnerLogger)(true));
|
||||||
const args = runnerConstructorStub.firstCall.args[1];
|
const args = runnerConstructorStub.firstCall.args[1];
|
||||||
// should NOT have used an config file
|
// should NOT have used an config file
|
||||||
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
||||||
t.falsy(configArg, "Should have injected a codescanning config");
|
t.falsy(configArg, "Should NOT have injected a codescanning config");
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
// Test macro for ensuring different variants of injected augmented configurations
|
// Test macro for ensuring different variants of injected augmented configurations
|
||||||
@@ -458,7 +474,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
|||||||
tempDir,
|
tempDir,
|
||||||
augmentationProperties,
|
augmentationProperties,
|
||||||
};
|
};
|
||||||
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), (0, logging_1.getRunnerLogger)(true));
|
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), undefined, (0, logging_1.getRunnerLogger)(true));
|
||||||
const args = runnerConstructorStub.firstCall.args[1];
|
const args = runnerConstructorStub.firstCall.args[1];
|
||||||
// should have used an config file
|
// should have used an config file
|
||||||
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
||||||
@@ -649,24 +665,67 @@ const injectedConfigMacro = ava_1.default.macro({
|
|||||||
queries: [],
|
queries: [],
|
||||||
},
|
},
|
||||||
}, {});
|
}, {});
|
||||||
(0, ava_1.default)("does not use injected config", async (t) => {
|
(0, ava_1.default)("does not pass a code scanning config or qlconfig file to the CLI when CLI config passing is disabled", async (t) => {
|
||||||
const origCODEQL_PASS_CONFIG_TO_CLI = process.env.CODEQL_PASS_CONFIG_TO_CLI;
|
await util.withTmpDir(async (tempDir) => {
|
||||||
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = "false";
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
try {
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
|
// stubbed version doesn't matter. It just needs to be valid semver.
|
||||||
|
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
|
||||||
|
await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, (0, testing_utils_1.createFeatures)([]), "/path/to/qlconfig.yml", (0, logging_1.getRunnerLogger)(true));
|
||||||
|
const args = runnerConstructorStub.firstCall.args[1];
|
||||||
|
// should not have used a config file
|
||||||
|
const hasConfigArg = args.some((arg) => arg.startsWith("--codescanning-config="));
|
||||||
|
t.false(hasConfigArg, "Should NOT have injected a codescanning config");
|
||||||
|
// should not have passed a qlconfig file
|
||||||
|
const hasQlconfigArg = args.some((arg) => arg.startsWith("--qlconfig-file="));
|
||||||
|
t.false(hasQlconfigArg, "Should NOT have passed a qlconfig file");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("passes a code scanning config AND qlconfig to the CLI when CLI config passing is enabled", async (t) => {
|
||||||
|
await util.withTmpDir(async (tempDir) => {
|
||||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
sinon
|
sinon
|
||||||
.stub(codeqlObject, "getVersion")
|
.stub(codeqlObject, "getVersion")
|
||||||
.resolves(feature_flags_1.featureConfig[feature_flags_1.Feature.CliConfigFileEnabled].minimumVersion);
|
.resolves(codeql.CODEQL_VERSION_INIT_WITH_QLCONFIG);
|
||||||
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), "/path/to/qlconfig.yml", (0, logging_1.getRunnerLogger)(true));
|
||||||
const args = runnerConstructorStub.firstCall.args[1];
|
const args = runnerConstructorStub.firstCall.args[1];
|
||||||
// should have used an config file
|
// should have used a config file
|
||||||
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
const hasCodeScanningConfigArg = args.some((arg) => arg.startsWith("--codescanning-config="));
|
||||||
t.falsy(configArg, "Should NOT have injected a codescanning config");
|
t.true(hasCodeScanningConfigArg, "Should have injected a qlconfig");
|
||||||
}
|
// should have passed a qlconfig file
|
||||||
finally {
|
const hasQlconfigArg = args.some((arg) => arg.startsWith("--qlconfig-file="));
|
||||||
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = origCODEQL_PASS_CONFIG_TO_CLI;
|
t.truthy(hasQlconfigArg, "Should have injected a codescanning config");
|
||||||
}
|
});
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("passes a code scanning config BUT NOT a qlconfig to the CLI when CLI config passing is enabled", async (t) => {
|
||||||
|
await util.withTmpDir(async (tempDir) => {
|
||||||
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
|
sinon.stub(codeqlObject, "getVersion").resolves("2.12.2");
|
||||||
|
await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), "/path/to/qlconfig.yml", (0, logging_1.getRunnerLogger)(true));
|
||||||
|
const args = runnerConstructorStub.firstCall.args[1];
|
||||||
|
// should have used a config file
|
||||||
|
const hasCodeScanningConfigArg = args.some((arg) => arg.startsWith("--codescanning-config="));
|
||||||
|
t.true(hasCodeScanningConfigArg, "Should have injected a codescanning config");
|
||||||
|
// should not have passed a qlconfig file
|
||||||
|
const hasQlconfigArg = args.some((arg) => arg.startsWith("--qlconfig-file="));
|
||||||
|
t.false(hasQlconfigArg, "should NOT have injected a qlconfig");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("does not pass a qlconfig to the CLI when it is undefined", async (t) => {
|
||||||
|
await util.withTmpDir(async (tempDir) => {
|
||||||
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
|
sinon
|
||||||
|
.stub(codeqlObject, "getVersion")
|
||||||
|
.resolves(codeql.CODEQL_VERSION_INIT_WITH_QLCONFIG);
|
||||||
|
await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), undefined, // undefined qlconfigFile
|
||||||
|
(0, logging_1.getRunnerLogger)(true));
|
||||||
|
const args = runnerConstructorStub.firstCall.args[1];
|
||||||
|
const hasQlconfigArg = args.some((arg) => arg.startsWith("--qlconfig-file="));
|
||||||
|
t.false(hasQlconfigArg, "should NOT have injected a qlconfig");
|
||||||
|
});
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-baseline-file-info for 2.11.3", async (t) => {
|
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-baseline-file-info for 2.11.3", async (t) => {
|
||||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
|
|||||||
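The test changes above all exercise the reworked codeql.setupCodeQL call, which no longer takes the toolcache-bypass boolean and now reports how the tools were obtained. A minimal sketch of the call shape these tests rely on; the argument comments are inferred from the test values and are assumptions, not the function's declared parameter names:

    // Sketch only: argument order and result fields as exercised by the tests above.
    const result = await codeql.setupCodeQL(
        "latest",                              // tools input (URL, "latest", or undefined)
        sampleApiDetails,                      // GitHub API details
        tmpDir,                                // temporary directory
        util.GitHubVariant.DOTCOM,             // variant (the bypass-toolcache flag is gone)
        SAMPLE_DEFAULT_CLI_VERSION,            // default CLI version info
        (0, logging_1.getRunnerLogger)(true),
        false);
    // The tests assert on result.toolsVersion, result.toolsSource
    // (init_1.ToolsSource.Download), and that result.toolsDownloadDurationMs is an integer.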
File diff suppressed because one or more lines are too long

lib/config-utils.js (generated, 87 changes)
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
 return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.downloadPacks = exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.validatePackSpecification = exports.prettyPrintPack = exports.parsePacksSpecification = exports.parsePacksFromConfig = exports.calculateAugmentation = exports.getDefaultConfig = exports.getRawLanguages = exports.getLanguages = exports.getLanguagesInRepo = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesMissingUses = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = exports.defaultAugmentationProperties = void 0;
+exports.wrapEnvironment = exports.generateRegistries = exports.downloadPacks = exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.validatePackSpecification = exports.prettyPrintPack = exports.parsePacksSpecification = exports.parsePacksFromConfig = exports.calculateAugmentation = exports.getDefaultConfig = exports.getRawLanguages = exports.getLanguages = exports.getLanguagesInRepo = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesMissingUses = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = exports.defaultAugmentationProperties = void 0;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const perf_hooks_1 = require("perf_hooks");
@@ -131,7 +131,11 @@ async function addDefaultQueries(codeQL, languages, resultMap) {
 await runResolveQueries(codeQL, resultMap, suites, undefined);
 }
 // The set of acceptable values for built-in suites from the codeql bundle
-const builtinSuites = ["security-extended", "security-and-quality"];
+const builtinSuites = [
+"security-experimental",
+"security-extended",
+"security-and-quality",
+];
 /**
 * Determine the set of queries associated with suiteName's suites and add them to resultMap.
 * Throws an error if suiteName is not a valid builtin suite.
@@ -143,6 +147,12 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suite
 if (!found) {
 throw new Error(getQueryUsesInvalid(configFile, suiteName));
 }
+if (suiteName === "security-experimental" &&
+!(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE))) {
+throw new Error(`The 'security-experimental' suite is not supported on CodeQL CLI versions earlier than
+${codeql_1.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE}. Please upgrade to CodeQL CLI version
+${codeql_1.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE} or later.`);
+}
 // If we're running the JavaScript security-extended analysis (or a superset of it), the repo is
 // opted into the ML-powered queries beta, and a user hasn't already added the ML-powered query
 // pack, then add the ML-powered query pack so that we run ML-powered queries.
@@ -151,7 +161,9 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suite
 (process.platform !== "win32" ||
 (await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS))) &&
 languages.includes("javascript") &&
-(found === "security-extended" || found === "security-and-quality") &&
+(found === "security-experimental" ||
+found === "security-extended" ||
+found === "security-and-quality") &&
 !packs.javascript?.some(isMlPoweredJsQueriesPack) &&
 (await featureEnablement.getValue(feature_flags_1.Feature.MlPoweredQueriesEnabled, codeQL))) {
 if (!packs.javascript) {
@@ -713,7 +725,7 @@ function parseQueriesFromInput(rawQueriesInput, queriesInputCombines) {
 }
 const trimmedInput = queriesInputCombines
 ? rawQueriesInput.trim().slice(1).trim()
-: rawQueriesInput?.trim();
+: rawQueriesInput?.trim() ?? "";
 if (queriesInputCombines && trimmedInput.length === 0) {
 throw new Error(getConfigFilePropertyError(undefined, "queries", "A '+' was used in the 'queries' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs."));
 }
@@ -893,7 +905,8 @@ exports.parsePacks = parsePacks;
 * Without a '+', an input value will override the corresponding value in the config file.
 *
 * @param inputValue The input value to process.
-* @returns true if the input value should replace the corresponding value in the config file, false if it should be appended.
+* @returns true if the input value should replace the corresponding value in the config file,
+* false if it should be appended.
 */
 function shouldCombine(inputValue) {
 return !!inputValue?.trim().startsWith("+");
@@ -946,8 +959,7 @@ async function initConfig(languagesInput, queriesInput, packsInput, registriesIn
 "Please make sure that the default queries are enabled, or you are specifying queries to run.");
 }
 }
-const registries = parseRegistries(registriesInput);
-await downloadPacks(codeQL, config.languages, config.packs, registries, apiDetails, config.tempDir, logger);
+await downloadPacks(codeQL, config.languages, config.packs, apiDetails, registriesInput, config.tempDir, logger);
 }
 // Save the config so we can easily access it again in the future
 await saveConfig(config, logger);
@@ -1043,21 +1055,9 @@ async function getConfig(tempDir, logger) {
 return JSON.parse(configString);
 }
 exports.getConfig = getConfig;
-async function downloadPacks(codeQL, languages, packs, registries, apiDetails, tmpDir, logger) {
+async function downloadPacks(codeQL, languages, packs, apiDetails, registriesInput, tempDir, logger) {
-let qlconfigFile;
+// This code path is only used when config parsing occurs in the Action.
-let registriesAuthTokens;
+const { registriesAuthTokens, qlconfigFile } = await generateRegistries(registriesInput, codeQL, tempDir, logger);
-if (registries) {
-if (!(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD))) {
-throw new Error(`'registries' input is not supported on CodeQL versions less than ${codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD}.`);
-}
-// generate a qlconfig.yml file to hold the registry configs.
-const qlconfig = createRegistriesBlock(registries);
-qlconfigFile = path.join(tmpDir, "qlconfig.yml");
-fs.writeFileSync(qlconfigFile, yaml.dump(qlconfig), "utf8");
-registriesAuthTokens = registries
-.map((registry) => `${registry.url}=${registry.token}`)
-.join(",");
-}
 await wrapEnvironment({
 GITHUB_TOKEN: apiDetails.auth,
 CODEQL_REGISTRIES_AUTH: registriesAuthTokens,
@@ -1085,6 +1085,48 @@ async function downloadPacks(codeQL, languages, packs, registries, apiDetails, t
 });
 }
 exports.downloadPacks = downloadPacks;
+/**
+* Generate a `qlconfig.yml` file from the `registries` input.
+* This file is used by the CodeQL CLI to list the registries to use for each
+* pack.
+*
+* @param registriesInput The value of the `registries` input.
+* @param codeQL a codeQL object, used only for checking the version of CodeQL.
+* @param tempDir a temporary directory to store the generated qlconfig.yml file.
+* @param logger a logger object.
+* @returns The path to the generated `qlconfig.yml` file and the auth tokens to
+* use for each registry.
+*/
+async function generateRegistries(registriesInput, codeQL, tempDir, logger) {
+const registries = parseRegistries(registriesInput);
+let registriesAuthTokens;
+let qlconfigFile;
+if (registries) {
+if (!(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD))) {
+throw new Error(`The 'registries' input is not supported on CodeQL CLI versions earlier than ${codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD}. Please upgrade to CodeQL CLI version ${codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD} or later.`);
+}
+// generate a qlconfig.yml file to hold the registry configs.
+const qlconfig = createRegistriesBlock(registries);
+qlconfigFile = path.join(tempDir, "qlconfig.yml");
+const qlconfigContents = yaml.dump(qlconfig);
+fs.writeFileSync(qlconfigFile, qlconfigContents, "utf8");
+logger.debug("Generated qlconfig.yml:");
+logger.debug(qlconfigContents);
+registriesAuthTokens = registries
+.map((registry) => `${registry.url}=${registry.token}`)
+.join(",");
+}
+if (typeof process.env.CODEQL_REGISTRIES_AUTH === "string") {
+logger.debug("Using CODEQL_REGISTRIES_AUTH environment variable to authenticate with registries.");
+}
+return {
+registriesAuthTokens:
+// if the user has explicitly set the CODEQL_REGISTRIES_AUTH env var then use that
+process.env.CODEQL_REGISTRIES_AUTH ?? registriesAuthTokens,
+qlconfigFile,
+};
+}
+exports.generateRegistries = generateRegistries;
 function createRegistriesBlock(registries) {
 if (!Array.isArray(registries) ||
 registries.some((r) => !r.url || !r.packages)) {
@@ -1134,4 +1176,5 @@ async function wrapEnvironment(env, operation) {
 }
 }
 }
+exports.wrapEnvironment = wrapEnvironment;
 //# sourceMappingURL=config-utils.js.map
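The JSDoc above describes the new generateRegistries helper. A minimal usage sketch, assuming only the exports shown in this diff (generateRegistries, wrapEnvironment) and a YAML registries input of the same shape used in the tests:

    // Sketch only: how downloadPacks consumes generateRegistries, per the diff above.
    const { registriesAuthTokens, qlconfigFile } = await generateRegistries(registriesInput, codeQL, tempDir, logger);
    await wrapEnvironment({
        GITHUB_TOKEN: apiDetails.auth,
        CODEQL_REGISTRIES_AUTH: registriesAuthTokens,
    }, async () => {
        // pack download / database init runs here, with qlconfigFile available to
        // hand to the CLI (the tests above check for a --qlconfig-file= argument).
    });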
File diff suppressed because one or more lines are too long

lib/config-utils.test.js (generated, 85 changes)
@@ -1014,7 +1014,7 @@ const mlPoweredQueriesMacro = ava_1.default.macro({
 // Test that the ~0.1.0 version of ML-powered queries is run on v2.8.3 of the CLI.
 (0, ava_1.default)(mlPoweredQueriesMacro, "2.8.3", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.1.0");
 // Test that ML-powered queries aren't run when the user hasn't specified that we should run the
-// `security-extended` or `security-and-quality` query suite.
+// `security-extended`, `security-and-quality`, or `security-experimental` query suite.
 (0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, undefined, undefined);
 // Test that ML-powered queries are run on non-Windows platforms running `security-extended` on
 // versions of the CodeQL CLI prior to 2.9.0.
@@ -1042,6 +1042,9 @@ const mlPoweredQueriesMacro = ava_1.default.macro({
 // Test that ML-powered queries are run on all platforms running `security-and-quality` on CodeQL
 // CLI 2.11.3+.
 (0, ava_1.default)(mlPoweredQueriesMacro, "2.11.3", true, undefined, "security-and-quality", "~0.4.0");
+// Test that ML-powered queries are run on all platforms running `security-experimental` on CodeQL
+// CLI 2.12.1+.
+(0, ava_1.default)(mlPoweredQueriesMacro, "2.12.1", true, undefined, "security-experimental", "~0.4.0");
 const calculateAugmentationMacro = ava_1.default.macro({
 exec: async (t, _title, rawPacksInput, rawQueriesInput, languages, expectedAugmentationProperties) => {
 const actualAugmentationProperties = configUtils.calculateAugmentation(rawPacksInput, rawQueriesInput, languages);
@@ -1111,8 +1114,8 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
 java: ["a", "b"],
 go: ["c", "d"],
 python: ["e", "f"],
-}, undefined, // registries
+}, sampleApiDetails, undefined, // registriesAuthTokens
-sampleApiDetails, tmpDir, logger);
+tmpDir, logger);
 // Expecting packs to be downloaded once for java and once for python
 t.deepEqual(packDownloadStub.callCount, 2);
 // no config file was created, so pass `undefined` as the config file path
@@ -1125,9 +1128,9 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
 // associated env vars
 return await util.withTmpDir(async (tmpDir) => {
 process.env.GITHUB_TOKEN = "not-a-token";
-process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
+process.env.CODEQL_REGISTRIES_AUTH = undefined;
 const logger = (0, logging_1.getRunnerLogger)(true);
-const registries = [
+const registriesInput = yaml.dump([
 {
 // no slash
 url: "http://ghcr.io",
@@ -1140,8 +1143,9 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
 packages: "semmle/*",
 token: "still-not-a-token",
 },
-];
+]);
 // append a slash to the first url
+const registries = yaml.load(registriesInput);
 const expectedRegistries = registries.map((r, i) => ({
 packages: r.packages,
 url: i === 0 ? `${r.url}/` : r.url,
@@ -1170,7 +1174,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
 java: ["a", "b"],
 go: ["c", "d"],
 python: ["e", "f"],
-}, registries, sampleApiDetails, tmpDir, logger);
+}, sampleApiDetails, registriesInput, tmpDir, logger);
 // Same packs are downloaded as in previous test
 t.deepEqual(packDownloadStub.callCount, 2);
 t.deepEqual(packDownloadStub.firstCall.args, [
@@ -1183,7 +1187,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
 ]);
 // Verify that the env vars were unset.
 t.deepEqual(process.env.GITHUB_TOKEN, "not-a-token");
-t.deepEqual(process.env.CODEQL_REGISTRIES_AUTH, "not-a-registries-auth");
+t.deepEqual(process.env.CODEQL_REGISTRIES_AUTH, undefined);
 });
 });
 (0, ava_1.default)("downloadPacks-with-registries fails on 2.10.3", async (t) => {
@@ -1193,7 +1197,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
 process.env.GITHUB_TOKEN = "not-a-token";
 process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
 const logger = (0, logging_1.getRunnerLogger)(true);
-const registries = [
+const registriesInput = yaml.dump([
 {
 url: "http://ghcr.io",
 packages: ["codeql/*", "dsp-testing/*"],
@@ -1204,12 +1208,12 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
 packages: "semmle/*",
 token: "still-not-a-token",
 },
-];
+]);
 const codeQL = (0, codeql_1.setCodeQL)({
 getVersion: () => Promise.resolve("2.10.3"),
 });
 await t.throwsAsync(async () => {
-return await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {}, registries, sampleApiDetails, tmpDir, logger);
+return await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {}, sampleApiDetails, registriesInput, tmpDir, logger);
 }, { instanceOf: Error }, "'registries' input is not supported on CodeQL versions less than 2.10.4.");
 });
 });
@@ -1220,7 +1224,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
 process.env.GITHUB_TOKEN = "not-a-token";
 process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
 const logger = (0, logging_1.getRunnerLogger)(true);
-const registries = [
+const registriesInput = yaml.dump([
 {
 // missing url property
 packages: ["codeql/*", "dsp-testing/*"],
@@ -1231,15 +1235,68 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
 packages: "semmle/*",
 token: "still-not-a-token",
 },
-];
+]);
 const codeQL = (0, codeql_1.setCodeQL)({
 getVersion: () => Promise.resolve("2.10.4"),
 });
 await t.throwsAsync(async () => {
-return await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {}, registries, sampleApiDetails, tmpDir, logger);
+return await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {}, sampleApiDetails, registriesInput, tmpDir, logger);
 }, { instanceOf: Error }, "Invalid 'registries' input. Must be an array of objects with 'url' and 'packages' properties.");
 });
 });
+// the happy path for generateRegistries is already tested in downloadPacks.
+// these following tests are for the error cases and when nothing is generated.
+(0, ava_1.default)("no generateRegistries when CLI is too old", async (t) => {
+return await util.withTmpDir(async (tmpDir) => {
+const registriesInput = yaml.dump([
+{
+// no slash
+url: "http://ghcr.io",
+packages: ["codeql/*", "dsp-testing/*"],
+token: "not-a-token",
+},
+]);
+const codeQL = (0, codeql_1.setCodeQL)({
+// Accepted CLI versions are 2.10.4 or higher
+getVersion: () => Promise.resolve("2.10.3"),
+});
+const logger = (0, logging_1.getRunnerLogger)(true);
+await t.throwsAsync(async () => await configUtils.generateRegistries(registriesInput, codeQL, tmpDir, logger), undefined, "'registries' input is not supported on CodeQL versions less than 2.10.4.");
+});
+});
+(0, ava_1.default)("no generateRegistries when registries is undefined", async (t) => {
+return await util.withTmpDir(async (tmpDir) => {
+const registriesInput = undefined;
+const codeQL = (0, codeql_1.setCodeQL)({
+// Accepted CLI versions are 2.10.4 or higher
+getVersion: () => Promise.resolve(codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD),
+});
+const logger = (0, logging_1.getRunnerLogger)(true);
+const { registriesAuthTokens, qlconfigFile } = await configUtils.generateRegistries(registriesInput, codeQL, tmpDir, logger);
+t.is(registriesAuthTokens, undefined);
+t.is(qlconfigFile, undefined);
+});
+});
+(0, ava_1.default)("generateRegistries prefers original CODEQL_REGISTRIES_AUTH", async (t) => {
+return await util.withTmpDir(async (tmpDir) => {
+process.env.CODEQL_REGISTRIES_AUTH = "original";
+const registriesInput = yaml.dump([
+{
+url: "http://ghcr.io",
+packages: ["codeql/*", "dsp-testing/*"],
+token: "not-a-token",
+},
+]);
+const codeQL = (0, codeql_1.setCodeQL)({
+// Accepted CLI versions are 2.10.4 or higher
+getVersion: () => Promise.resolve(codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD),
+});
+const logger = (0, logging_1.getRunnerLogger)(true);
+const { registriesAuthTokens, qlconfigFile } = await configUtils.generateRegistries(registriesInput, codeQL, tmpDir, logger);
+t.is(registriesAuthTokens, "original");
+t.is(qlconfigFile, path.join(tmpDir, "qlconfig.yml"));
+});
+});
 // getLanguages
 const mockRepositoryNwo = (0, repository_1.parseRepositoryNwo)("owner/repo");
 // eslint-disable-next-line github/array-foreach
File diff suppressed because one or more lines are too long

lib/defaults.json
@@ -1,6 +1,6 @@
 {
-"bundleVersion": "codeql-bundle-20230120",
+"bundleVersion": "codeql-bundle-20230304",
-"cliVersion": "2.12.1",
+"cliVersion": "2.12.4",
-"priorBundleVersion": "codeql-bundle-20230105",
+"priorBundleVersion": "codeql-bundle-20230217",
-"priorCliVersion": "2.12.0"
+"priorCliVersion": "2.12.3"
 }
lib/feature-flags.js (generated, 52 changes)
@@ -34,46 +34,31 @@ const DEFAULT_VERSION_FEATURE_FLAG_PREFIX = "default_codeql_version_";
 const DEFAULT_VERSION_FEATURE_FLAG_SUFFIX = "_enabled";
 var Feature;
 (function (Feature) {
-Feature["BypassToolcacheEnabled"] = "bypass_toolcache_enabled";
-Feature["BypassToolcacheKotlinSwiftEnabled"] = "bypass_toolcache_kotlin_swift_enabled";
 Feature["CliConfigFileEnabled"] = "cli_config_file_enabled";
 Feature["DisableKotlinAnalysisEnabled"] = "disable_kotlin_analysis_enabled";
 Feature["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
-Feature["TrapCachingEnabled"] = "trap_caching_enabled";
 Feature["UploadFailedSarifEnabled"] = "upload_failed_sarif_enabled";
 })(Feature = exports.Feature || (exports.Feature = {}));
 exports.featureConfig = {
-[Feature.BypassToolcacheEnabled]: {
-envVar: "CODEQL_BYPASS_TOOLCACHE",
-// Cannot specify a minimum version because this flag is checked before we have
-// access to the CodeQL instance.
-minimumVersion: undefined,
-},
-[Feature.BypassToolcacheKotlinSwiftEnabled]: {
-envVar: "CODEQL_BYPASS_TOOLCACHE_KOTLIN_SWIFT",
-// Cannot specify a minimum version because this flag is checked before we have
-// access to the CodeQL instance.
-minimumVersion: undefined,
-},
 [Feature.DisableKotlinAnalysisEnabled]: {
 envVar: "CODEQL_DISABLE_KOTLIN_ANALYSIS",
 minimumVersion: undefined,
+defaultValue: false,
 },
 [Feature.CliConfigFileEnabled]: {
 envVar: "CODEQL_PASS_CONFIG_TO_CLI",
 minimumVersion: "2.11.6",
+defaultValue: true,
 },
 [Feature.MlPoweredQueriesEnabled]: {
 envVar: "CODEQL_ML_POWERED_QUERIES",
 minimumVersion: "2.7.5",
-},
+defaultValue: false,
-[Feature.TrapCachingEnabled]: {
-envVar: "CODEQL_TRAP_CACHING",
-minimumVersion: undefined,
 },
 [Feature.UploadFailedSarifEnabled]: {
 envVar: "CODEQL_ACTION_UPLOAD_FAILED_SARIF",
 minimumVersion: "2.11.3",
+defaultValue: false,
 },
 };
 exports.FEATURE_FLAGS_FILE_NAME = "cached-feature-flags.json";
@@ -105,10 +90,6 @@ class Features {
 if (!codeql && exports.featureConfig[feature].minimumVersion) {
 throw new Error(`Internal error: A minimum version is specified for feature ${feature}, but no instance of CodeQL was provided.`);
 }
-// Bypassing the toolcache is disabled in test mode.
-if (feature === Feature.BypassToolcacheEnabled && util.isInTestMode()) {
-return false;
-}
 const envVar = (process.env[exports.featureConfig[feature].envVar] || "").toLocaleLowerCase();
 // Do not use this feature if user explicitly disables it via an environment variable.
 if (envVar === "false") {
@@ -126,7 +107,8 @@ class Features {
 return true;
 }
 // Ask the GitHub API if the feature is enabled.
-return await this.gitHubFeatureFlags.getValue(feature);
+return ((await this.gitHubFeatureFlags.getValue(feature)) ??
+exports.featureConfig[feature].defaultValue);
 }
 }
 exports.Features = Features;
@@ -136,7 +118,7 @@ class GitHubFeatureFlags {
 this.repositoryNwo = repositoryNwo;
 this.featureFlagsFile = featureFlagsFile;
 this.logger = logger;
-/**/
+this.hasAccessedRemoteFeatureFlags = false; // Not accessed by default.
 }
 getCliVersionFromFeatureFlag(f) {
 if (!f.startsWith(DEFAULT_VERSION_FEATURE_FLAG_PREFIX) ||
@@ -157,7 +139,9 @@ class GitHubFeatureFlags {
 const defaultDotComCliVersion = await this.getDefaultDotcomCliVersion();
 return {
 cliVersion: defaultDotComCliVersion.version,
-toolsFeatureFlagsValid: defaultDotComCliVersion.toolsFeatureFlagsValid,
+toolsFeatureFlagsValid: this.hasAccessedRemoteFeatureFlags
+? defaultDotComCliVersion.toolsFeatureFlagsValid
+: undefined,
 variant,
 };
 }
@@ -188,7 +172,9 @@ class GitHubFeatureFlags {
 `shipped with the Action. This is ${defaults.cliVersion}.`);
 return {
 version: defaults.cliVersion,
-toolsFeatureFlagsValid: false,
+toolsFeatureFlagsValid: this.hasAccessedRemoteFeatureFlags
+? false
+: undefined,
 };
 }
 const maxCliVersion = enabledFeatureFlagCliVersions.reduce((maxVersion, currentVersion) => currentVersion > maxVersion ? currentVersion : maxVersion, enabledFeatureFlagCliVersions[0]);
@@ -198,13 +184,13 @@ class GitHubFeatureFlags {
 async getValue(feature) {
 const response = await this.getAllFeatures();
 if (response === undefined) {
-this.logger.debug(`No feature flags API response for ${feature}, considering it disabled.`);
+this.logger.debug(`No feature flags API response for ${feature}.`);
-return false;
+return undefined;
 }
 const featureEnablement = response[feature];
 if (featureEnablement === undefined) {
-this.logger.debug(`Feature '${feature}' undefined in API response, considering it disabled.`);
+this.logger.debug(`Feature '${feature}' undefined in API response.`);
-return false;
+return undefined;
 }
 return !!featureEnablement;
 }
@@ -255,6 +241,7 @@ class GitHubFeatureFlags {
 // Do nothing when not running against github.com
 if (this.gitHubVersion.type !== util.GitHubVariant.DOTCOM) {
 this.logger.debug("Not running against github.com. Disabling all toggleable features.");
+this.hasAccessedRemoteFeatureFlags = false;
 return {};
 }
 try {
@@ -265,6 +252,7 @@ class GitHubFeatureFlags {
 const remoteFlags = response.data;
 this.logger.debug("Loaded the following default values for the feature flags from the Code Scanning API: " +
 `${JSON.stringify(remoteFlags)}`);
+this.hasAccessedRemoteFeatureFlags = true;
 return remoteFlags;
 }
 catch (e) {
@@ -273,6 +261,7 @@ class GitHubFeatureFlags {
 "As a result, it will not be opted into any experimental features. " +
 "This could be because the Action is running on a pull request from a fork. If not, " +
 `please ensure the Action has the 'security-events: write' permission. Details: ${e}`);
+this.hasAccessedRemoteFeatureFlags = false;
 return {};
 }
 else {
@@ -283,7 +272,6 @@ class GitHubFeatureFlags {
 throw new Error(`Encountered an error while trying to determine feature enablement: ${e}`);
 }
 }
-return {};
 }
 }
 //# sourceMappingURL=feature-flags.js.map
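The feature-flag changes above replace the hard "false" fallbacks with per-feature defaults. A minimal sketch of the resulting precedence in Features.getValue, restated from the diff (the env var handling lives in the unchanged surrounding code):

    // Sketch only: precedence implied by Features.getValue above.
    // 1. An explicit CODEQL_* env var override is honored first (e.g. "false" disables the feature).
    // 2. Otherwise the remote flag value from the Code Scanning API is used, when one was returned.
    // 3. Otherwise featureConfig[feature].defaultValue applies (new in this change).
    const remoteValue = await this.gitHubFeatureFlags.getValue(feature); // may now be undefined
    return remoteValue ?? exports.featureConfig[feature].defaultValue;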
File diff suppressed because one or more lines are too long

lib/feature-flags.test.js (generated, 12 changes)
@@ -53,7 +53,7 @@ for (const variant of ALL_FEATURES_DISABLED_VARIANTS) {
 const loggedMessages = [];
 const featureEnablement = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages), variant.gitHubVersion);
 for (const feature of Object.values(feature_flags_1.Feature)) {
-t.false(await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature)));
+t.deepEqual(await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature)), feature_flags_1.featureConfig[feature].defaultValue);
 }
 t.assert(loggedMessages.find((v) => v.type === "debug" &&
 v.message ===
@@ -61,24 +61,24 @@ for (const variant of ALL_FEATURES_DISABLED_VARIANTS) {
 });
 });
 }
-(0, ava_1.default)("API response missing", async (t) => {
+(0, ava_1.default)("API response missing and features use default value", async (t) => {
 await (0, util_1.withTmpDir)(async (tmpDir) => {
 const loggedMessages = [];
 const featureEnablement = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
 (0, testing_utils_1.mockFeatureFlagApiEndpoint)(403, {});
 for (const feature of Object.values(feature_flags_1.Feature)) {
-t.assert((await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature))) === false);
+t.assert((await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature))) === feature_flags_1.featureConfig[feature].defaultValue);
 }
 assertAllFeaturesUndefinedInApi(t, loggedMessages);
 });
 });
-(0, ava_1.default)("Features are disabled if they're not returned in API response", async (t) => {
+(0, ava_1.default)("Features use default value if they're not returned in API response", async (t) => {
 await (0, util_1.withTmpDir)(async (tmpDir) => {
 const loggedMessages = [];
 const featureEnablement = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
 (0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
 for (const feature of Object.values(feature_flags_1.Feature)) {
-t.assert((await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature))) === false);
+t.assert((await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature))) === feature_flags_1.featureConfig[feature].defaultValue);
 }
 assertAllFeaturesUndefinedInApi(t, loggedMessages);
 });
@@ -283,7 +283,7 @@ function assertAllFeaturesUndefinedInApi(t, loggedMessages) {
 for (const feature of Object.keys(feature_flags_1.featureConfig)) {
 t.assert(loggedMessages.find((v) => v.type === "debug" &&
 v.message.includes(feature) &&
-v.message.includes("considering it disabled")) !== undefined);
+v.message.includes("undefined in API response")) !== undefined);
 }
 }
 function initializeFeatures(initialValue) {
File diff suppressed because one or more lines are too long

lib/init-action.js (generated, 26 changes)
@@ -46,12 +46,13 @@ async function sendInitStatusReport(actionStatus, startedAt, config, toolsDownlo
 tools_source: toolsSource || init_1.ToolsSource.Unknown,
 workflow_languages: workflowLanguages || "",
 };
-let initToolsDownloadFields = {};
+const initToolsDownloadFields = {};
-if (toolsSource === init_1.ToolsSource.Download) {
+if (toolsDownloadDurationMs !== undefined) {
-initToolsDownloadFields = {
+initToolsDownloadFields.tools_download_duration_ms =
-tools_download_duration_ms: toolsDownloadDurationMs,
+toolsDownloadDurationMs;
-tools_feature_flags_valid: toolsFeatureFlagsValid,
+}
-};
+if (toolsFeatureFlagsValid !== undefined) {
+initToolsDownloadFields.tools_feature_flags_valid = toolsFeatureFlagsValid;
 }
 if (config !== undefined) {
 const languages = config.languages.join(",");
@@ -112,6 +113,7 @@ async function run() {
 const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
 (0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
 const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
+const registriesInput = (0, actions_util_1.getOptionalInput)("registries");
 const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
 try {
 const workflowErrors = await (0, workflow_1.validateWorkflow)();
@@ -122,13 +124,13 @@ async function run() {
 if (codeQLDefaultVersionInfo.variant === util_1.GitHubVariant.DOTCOM) {
 toolsFeatureFlagsValid = codeQLDefaultVersionInfo.toolsFeatureFlagsValid;
 }
-const initCodeQLResult = await (0, init_1.initCodeQL)((0, actions_util_1.getOptionalInput)("tools"), apiDetails, (0, actions_util_1.getTemporaryDirectory)(), gitHubVersion.type, await (0, util_1.shouldBypassToolcache)(features, (0, actions_util_1.getOptionalInput)("tools"), (0, actions_util_1.getOptionalInput)("languages"), repositoryNwo, logger), codeQLDefaultVersionInfo, logger);
+const initCodeQLResult = await (0, init_1.initCodeQL)((0, actions_util_1.getOptionalInput)("tools"), apiDetails, (0, actions_util_1.getTemporaryDirectory)(), gitHubVersion.type, codeQLDefaultVersionInfo, logger);
 codeql = initCodeQLResult.codeql;
 toolsDownloadDurationMs = initCodeQLResult.toolsDownloadDurationMs;
 toolsVersion = initCodeQLResult.toolsVersion;
 toolsSource = initCodeQLResult.toolsSource;
 await (0, util_1.enrichEnvironment)(codeql);
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("registries"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), await getTrapCachingEnabled(features),
|
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), registriesInput, (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), getTrapCachingEnabled(),
|
||||||
// Debug mode is enabled if:
|
// Debug mode is enabled if:
|
||||||
// - The `init` Action is passed `debug: true`.
|
// - The `init` Action is passed `debug: true`.
|
||||||
// - Actions step debugging is enabled (e.g. by [enabling debug logging for a rerun](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-all-the-jobs-in-a-workflow),
|
// - Actions step debugging is enabled (e.g. by [enabling debug logging for a rerun](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-all-the-jobs-in-a-workflow),
|
||||||
@@ -172,7 +174,7 @@ async function run() {
|
|||||||
core.exportVariable("CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN", "true");
|
core.exportVariable("CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN", "true");
|
||||||
}
|
}
|
||||||
const sourceRoot = path.resolve((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), (0, actions_util_1.getOptionalInput)("source-root") || "");
|
const sourceRoot = path.resolve((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), (0, actions_util_1.getOptionalInput)("source-root") || "");
|
||||||
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", features, logger);
|
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", registriesInput, features, apiDetails, logger);
|
||||||
if (tracerConfig !== undefined) {
|
if (tracerConfig !== undefined) {
|
||||||
for (const [key, value] of Object.entries(tracerConfig.env)) {
|
for (const [key, value] of Object.entries(tracerConfig.env)) {
|
||||||
core.exportVariable(key, value);
|
core.exportVariable(key, value);
|
||||||
@@ -192,7 +194,7 @@ async function run() {
|
|||||||
}
|
}
|
||||||
await sendInitStatusReport("success", startedAt, config, toolsDownloadDurationMs, toolsFeatureFlagsValid, toolsSource, toolsVersion, logger);
|
await sendInitStatusReport("success", startedAt, config, toolsDownloadDurationMs, toolsFeatureFlagsValid, toolsSource, toolsVersion, logger);
|
||||||
}
|
}
|
||||||
async function getTrapCachingEnabled(featureEnablement) {
|
function getTrapCachingEnabled() {
|
||||||
// If the workflow specified something always respect that
|
// If the workflow specified something always respect that
|
||||||
const trapCaching = (0, actions_util_1.getOptionalInput)("trap-caching");
|
const trapCaching = (0, actions_util_1.getOptionalInput)("trap-caching");
|
||||||
if (trapCaching !== undefined)
|
if (trapCaching !== undefined)
|
||||||
@@ -200,8 +202,8 @@ async function getTrapCachingEnabled(featureEnablement) {
|
|||||||
// On self-hosted runners which may have slow network access, disable TRAP caching by default
|
// On self-hosted runners which may have slow network access, disable TRAP caching by default
|
||||||
if (!(0, util_1.isHostedRunner)())
|
if (!(0, util_1.isHostedRunner)())
|
||||||
return false;
|
return false;
|
||||||
// On hosted runners, respect the feature flag
|
// On hosted runners, enable TRAP caching by default
|
||||||
return await featureEnablement.getValue(feature_flags_1.Feature.TrapCachingEnabled);
|
return true;
|
||||||
}
|
}
|
||||||
async function runWrapper() {
|
async function runWrapper() {
|
||||||
try {
|
try {
|
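The init-action.js hunk above replaces a single object assignment with per-field guards, so telemetry fields are only attached when their values are known. The same pattern in isolation (field names taken from the diff; the wrapper function is illustrative):

// Hedged sketch: only set optional status-report fields when the underlying
// value is defined, so the report never carries `undefined` properties.
interface ToolsDownloadFields {
  tools_download_duration_ms?: number;
  tools_feature_flags_valid?: boolean;
}

function buildToolsDownloadFields(
  toolsDownloadDurationMs?: number,
  toolsFeatureFlagsValid?: boolean,
): ToolsDownloadFields {
  const fields: ToolsDownloadFields = {};
  if (toolsDownloadDurationMs !== undefined) {
    fields.tools_download_duration_ms = toolsDownloadDurationMs;
  }
  if (toolsFeatureFlagsValid !== undefined) {
    fields.tools_feature_flags_valid = toolsFeatureFlagsValid;
  }
  return fields;
}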
File diff suppressed because one or more lines are too long

22 lib/init.js generated
@@ -41,9 +41,9 @@ var ToolsSource;
 ToolsSource["Toolcache"] = "TOOLCACHE";
 ToolsSource["Download"] = "DOWNLOAD";
 })(ToolsSource = exports.ToolsSource || (exports.ToolsSource = {}));
-async function initCodeQL(toolsInput, apiDetails, tempDir, variant, bypassToolcache, defaultCliVersion, logger) {
+async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger) {
 logger.startGroup("Setup CodeQL tools");
-const { codeql, toolsDownloadDurationMs, toolsSource, toolsVersion } = await (0, codeql_1.setupCodeQL)(toolsInput, apiDetails, tempDir, variant, bypassToolcache, defaultCliVersion, logger, true);
+const { codeql, toolsDownloadDurationMs, toolsSource, toolsVersion } = await (0, codeql_1.setupCodeQL)(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger, true);
 await codeql.printVersion();
 logger.endGroup();
 return { codeql, toolsDownloadDurationMs, toolsSource, toolsVersion };
@@ -57,12 +57,26 @@ async function initConfig(languagesInput, queriesInput, packsInput, registriesIn
 return config;
 }
 exports.initConfig = initConfig;
-async function runInit(codeql, config, sourceRoot, processName, featureEnablement, logger) {
+async function runInit(codeql, config, sourceRoot, processName, registriesInput, featureEnablement, apiDetails, logger) {
 fs.mkdirSync(config.dbLocation, { recursive: true });
 try {
 if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
+// When parsing the codeql config in the CLI, we have not yet created the qlconfig file.
+// So, create it now.
+// If we are parsing the config file in the Action, then the qlconfig file was already created
+// before the `pack download` command was invoked. It is not required for the init command.
+let registriesAuthTokens;
+let qlconfigFile;
+if (await util.useCodeScanningConfigInCli(codeql, featureEnablement)) {
+({ registriesAuthTokens, qlconfigFile } =
+await configUtils.generateRegistries(registriesInput, codeql, config.tempDir, logger));
+}
+await configUtils.wrapEnvironment({
+GITHUB_TOKEN: apiDetails.auth,
+CODEQL_REGISTRIES_AUTH: registriesAuthTokens,
+},
 // Init a database cluster
-await codeql.databaseInitCluster(config, sourceRoot, processName, featureEnablement, logger);
+async () => await codeql.databaseInitCluster(config, sourceRoot, processName, featureEnablement, qlconfigFile, logger));
 }
 else {
 for (const language of config.languages) {
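runInit now wraps databaseInitCluster in configUtils.wrapEnvironment with GITHUB_TOKEN and CODEQL_REGISTRIES_AUTH set. That helper's implementation is not part of this diff; a generic sketch of the pattern it suggests, under the assumption that it applies the given variables for the duration of the callback and then restores the previous values:

// Hypothetical helper (not the action's actual wrapEnvironment): apply env
// vars around an async callback, restoring the old values even on failure.
async function withEnvironment<T>(
  env: Record<string, string | undefined>,
  body: () => Promise<T>,
): Promise<T> {
  const saved: Record<string, string | undefined> = {};
  for (const [key, value] of Object.entries(env)) {
    saved[key] = process.env[key];
    if (value === undefined) {
      delete process.env[key];
    } else {
      process.env[key] = value;
    }
  }
  try {
    return await body();
  } finally {
    for (const [key, value] of Object.entries(saved)) {
      if (value === undefined) {
        delete process.env[key];
      } else {
        process.env[key] = value;
      }
    }
  }
}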
File diff suppressed because one or more lines are too long

3 lib/languages.js generated
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.isScannedLanguage = exports.isTracedLanguage = exports.parseLanguage = exports.resolveAlias = exports.KOTLIN_SWIFT_BYPASS = exports.LANGUAGE_ALIASES = exports.Language = void 0;
+exports.isScannedLanguage = exports.isTracedLanguage = exports.parseLanguage = exports.resolveAlias = exports.LANGUAGE_ALIASES = exports.Language = void 0;
 // All the languages supported by CodeQL
 var Language;
 (function (Language) {
@@ -21,7 +21,6 @@ exports.LANGUAGE_ALIASES = {
 kotlin: Language.java,
 typescript: Language.javascript,
 };
-exports.KOTLIN_SWIFT_BYPASS = ["kotlin", "swift"];
 function resolveAlias(lang) {
 return exports.LANGUAGE_ALIASES[lang] || lang;
 }
@@ -1 +1 @@
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;;AAAA,wCAAwC;AACxC,IAAY,QASX;AATD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;IACjB,yBAAa,CAAA;IACb,2BAAe,CAAA;AACjB,CAAC,EATW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QASnB;AAED,iCAAiC;AACpB,QAAA,gBAAgB,GAAiC;IAC5D,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,MAAM,EAAE,QAAQ,CAAC,IAAI;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAIW,QAAA,mBAAmB,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;AAEvD,SAAgB,YAAY,CAAC,IAAqB;IAChD,OAAO,wBAAgB,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC;AACxC,CAAC;AAFD,oCAEC;AAED;;;;;;;;;GASG;AACH,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;IAEzC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,iEAAiE;IACjE,oCAAoC;IACpC,IAAI,QAAQ,IAAI,wBAAgB,EAAE;QAChC,OAAO,QAAQ,CAAC;KACjB;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAhBD,sCAgBC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO;QACL,QAAQ,CAAC,GAAG;QACZ,QAAQ,CAAC,MAAM;QACf,QAAQ,CAAC,EAAE;QACX,QAAQ,CAAC,IAAI;QACb,QAAQ,CAAC,KAAK;KACf,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACvB,CAAC;AARD,4CAQC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;;AAAA,wCAAwC;AACxC,IAAY,QASX;AATD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;IACjB,yBAAa,CAAA;IACb,2BAAe,CAAA;AACjB,CAAC,EATW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QASnB;AAED,iCAAiC;AACpB,QAAA,gBAAgB,GAAiC;IAC5D,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,MAAM,EAAE,QAAQ,CAAC,IAAI;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAIF,SAAgB,YAAY,CAAC,IAAqB;IAChD,OAAO,wBAAgB,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC;AACxC,CAAC;AAFD,oCAEC;AAED;;;;;;;;;GASG;AACH,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;IAEzC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,iEAAiE;IACjE,oCAAoC;IACpC,IAAI,QAAQ,IAAI,wBAAgB,EAAE;QAChC,OAAO,QAAQ,CAAC;KACjB;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAhBD,sCAgBC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO;QACL,QAAQ,CAAC,GAAG;QACZ,QAAQ,CAAC,MAAM;QACf,QAAQ,CAAC,EAAE;QACX,QAAQ,CAAC,IAAI;QACb,QAAQ,CAAC,KAAK;KACf,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACvB,CAAC;AARD,4CAQC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}
282 lib/setup-codeql.js generated
@@ -26,7 +26,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.setupCodeQLBundle = exports.getCodeQLURLVersion = exports.downloadCodeQL = exports.getCodeQLSource = exports.convertToSemVer = exports.getBundleVersionFromUrl = exports.tryFindCliVersionDotcomOnly = exports.findCodeQLBundleTagDotcomOnly = exports.getCodeQLActionRepository = exports.CODEQL_DEFAULT_ACTION_REPOSITORY = void 0;
+exports.setupCodeQLBundle = exports.getCodeQLURLVersion = exports.downloadCodeQL = exports.tryGetFallbackToolcacheVersion = exports.getCodeQLSource = exports.convertToSemVer = exports.tryGetBundleVersionFromUrl = exports.tryFindCliVersionDotcomOnly = exports.findCodeQLBundleTagDotcomOnly = exports.getCodeQLActionRepository = exports.CODEQL_DEFAULT_ACTION_REPOSITORY = void 0;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const perf_hooks_1 = require("perf_hooks");
@@ -211,14 +211,30 @@ async function getCodeQLBundleDownloadURL(tagName, apiDetails, variant, logger)
 }
 return `https://github.com/${exports.CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${tagName}/${codeQLBundleName}`;
 }
-function getBundleVersionFromUrl(url) {
-const match = url.match(/\/codeql-bundle-(.*)\//);
+function tryGetBundleVersionFromTagName(tagName, logger) {
+const match = tagName.match(/^codeql-bundle-(.*)$/);
 if (match === null || match.length < 2) {
-throw new Error(`Malformed tools url: ${url}. Bundle version could not be inferred`);
+logger.debug(`Could not determine bundle version from tag ${tagName}.`);
+return undefined;
 }
 return match[1];
 }
-exports.getBundleVersionFromUrl = getBundleVersionFromUrl;
+function tryGetTagNameFromUrl(url, logger) {
+const match = url.match(/\/(codeql-bundle-.*)\//);
+if (match === null || match.length < 2) {
+logger.debug(`Could not determine tag name for URL ${url}.`);
+return undefined;
+}
+return match[1];
+}
+function tryGetBundleVersionFromUrl(url, logger) {
+const tagName = tryGetTagNameFromUrl(url, logger);
+if (tagName === undefined) {
+return undefined;
+}
+return tryGetBundleVersionFromTagName(tagName, logger);
+}
+exports.tryGetBundleVersionFromUrl = tryGetBundleVersionFromUrl;
 function convertToSemVer(version, logger) {
 if (!semver.valid(version)) {
 logger.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
@@ -231,18 +247,10 @@ function convertToSemVer(version, logger) {
 return s;
 }
 exports.convertToSemVer = convertToSemVer;
-async function getOrFindBundleTagName(version, logger) {
-if (version.variant === util.GitHubVariant.DOTCOM) {
-return await findCodeQLBundleTagDotcomOnly(version.cliVersion, logger);
-}
-else {
-return version.tagName;
-}
-}
 /**
 * Look for a version of the CodeQL tools in the cache which could override the requested CLI version.
 */
-async function findOverridingToolsInCache(requestedCliVersion, logger) {
+async function findOverridingToolsInCache(humanReadableVersion, logger) {
 const candidates = toolcache
 .findAllVersions("CodeQL")
 .filter(util_1.isGoodVersion)
@@ -253,7 +261,7 @@ async function findOverridingToolsInCache(requestedCliVersion, logger) {
 .filter(({ folder }) => fs.existsSync(path.join(folder, "pinned-version")));
 if (candidates.length === 1) {
 const candidate = candidates[0];
-logger.debug(`CodeQL tools version ${candidate.version} in toolcache overriding version ${requestedCliVersion}.`);
+logger.debug(`CodeQL tools version ${candidate.version} in toolcache overriding version ${humanReadableVersion}.`);
 return {
 codeqlFolder: candidate.folder,
 sourceType: "toolcache",
@@ -269,7 +277,7 @@ async function findOverridingToolsInCache(requestedCliVersion, logger) {
 }
 return undefined;
 }
-async function getCodeQLSource(toolsInput, bypassToolcache, defaultCliVersion, apiDetails, variant, logger) {
+async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, logger) {
 if (toolsInput && toolsInput !== "latest" && !toolsInput.startsWith("http")) {
 return {
 codeqlTarPath: toolsInput,
@@ -277,124 +285,166 @@ async function getCodeQLSource(toolsInput, bypassToolcache, defaultCliVersion, a
 toolsVersion: "local",
 };
 }
-const forceLatestReason =
-// We use the special value of 'latest' to prioritize the version in the
-// defaults over any pinned cached version.
-toolsInput === "latest"
-? '"tools: latest" was requested'
-: // If the user hasn't requested a particular CodeQL version, then bypass
-// the toolcache when the appropriate feature is enabled. This
-// allows us to quickly rollback a broken bundle that has made its way
-// into the toolcache.
-toolsInput === undefined && bypassToolcache
-? "a specific version of the CodeQL tools was not requested and the bypass toolcache feature is enabled"
-: undefined;
-const forceLatest = forceLatestReason !== undefined;
-if (forceLatest) {
-logger.debug(`Forcing the latest version of the CodeQL tools since ${forceLatestReason}.`);
-}
 /**
-* The requested version is:
+* Whether the tools shipped with the Action, i.e. those in `defaults.json`, have been forced.
 *
-* 1. The one in `defaults.json`, if forceLatest is true.
-* 2. The version specified by the tools input URL, if one was provided.
-* 3. The default CLI version, otherwise.
-* We include a `variant` property to let us verify using the type system that
-* `tagName` is only undefined when the variant is Dotcom. This lets us ensure
-* that we can always compute `tagName`, either by using the existing tag name
-* on enterprise instances, or calling `findCodeQLBundleTagDotcomOnly` on
-* Dotcom.
+* We use the special value of 'latest' to prioritize the version in `defaults.json` over the
+* version specified by the feature flags on Dotcom and over any pinned cached version on
+* Enterprise Server.
 */
-const requestedVersion = forceLatest
-? // case 1
-{
-cliVersion: defaults.cliVersion,
-syntheticCliVersion: defaults.cliVersion,
-tagName: defaults.bundleVersion,
-variant,
+const forceShippedTools = toolsInput === "latest";
+if (forceShippedTools) {
+logger.info("Overriding the version of the CodeQL tools by the version shipped with the Action since " +
+`"tools: latest" was requested.`);
 }
-: toolsInput !== undefined
-? // case 2
-{
-syntheticCliVersion: convertToSemVer(getBundleVersionFromUrl(toolsInput), logger),
-tagName: `codeql-bundle-${getBundleVersionFromUrl(toolsInput)}`,
-url: toolsInput,
-variant,
+/** CLI version number, for example 2.12.1. */
+let cliVersion;
+/** Tag name of the CodeQL bundle, for example `codeql-bundle-20230120`. */
+let tagName;
+/**
+* URL of the CodeQL bundle.
+*
+* This does not always include a tag name.
+*/
+let url;
+if (forceShippedTools) {
+cliVersion = defaults.cliVersion;
+tagName = defaults.bundleVersion;
 }
-: // case 3
-{
-...defaultCliVersion,
-syntheticCliVersion: defaultCliVersion.cliVersion,
-};
-// If we find the specified version, we always use that.
-let codeqlFolder = toolcache.find("CodeQL", requestedVersion.syntheticCliVersion);
-let tagName = requestedVersion["tagName"];
+else if (toolsInput !== undefined) {
+// If a tools URL was provided, then use that.
+tagName = tryGetTagNameFromUrl(toolsInput, logger);
+url = toolsInput;
+}
+else {
+// Otherwise, use the default CLI version passed in.
+cliVersion = defaultCliVersion.cliVersion;
+tagName = defaultCliVersion["tagName"];
+}
+const bundleVersion = tagName && tryGetBundleVersionFromTagName(tagName, logger);
+const humanReadableVersion = cliVersion ??
+(bundleVersion && convertToSemVer(bundleVersion, logger)) ??
+tagName ??
+url ??
+"unknown";
+logger.debug("Attempting to obtain CodeQL tools. " +
+`CLI version: ${cliVersion ?? "unknown"}, ` +
+`bundle tag name: ${tagName ?? "unknown"}, ` +
+`URL: ${url ?? "unspecified"}.`);
+let codeqlFolder;
+if (cliVersion) {
+// If we find the specified CLI version, we always use that.
+codeqlFolder = toolcache.find("CodeQL", cliVersion);
+// Fall back to matching `x.y.z-<tagName>`.
 if (!codeqlFolder) {
 logger.debug("Didn't find a version of the CodeQL tools in the toolcache with a version number " +
-`exactly matching ${requestedVersion.syntheticCliVersion}.`);
-if (requestedVersion.cliVersion) {
+`exactly matching ${cliVersion}.`);
 const allVersions = toolcache.findAllVersions("CodeQL");
 logger.debug(`Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify(allVersions)}.`);
 // If there is exactly one version of the CodeQL tools in the toolcache, and that version is
 // the form `x.y.z-<tagName>`, then use it.
-const candidateVersions = allVersions.filter((version) => version.startsWith(`${requestedVersion.cliVersion}-`));
+const candidateVersions = allVersions.filter((version) => version.startsWith(`${cliVersion}-`));
 if (candidateVersions.length === 1) {
-logger.debug("Exactly one candidate version found, using that.");
+logger.debug(`Exactly one version of the CodeQL tools starting with ${cliVersion} found in the ` +
+"toolcache, using that.");
 codeqlFolder = toolcache.find("CodeQL", candidateVersions[0]);
 }
+else if (candidateVersions.length === 0) {
+logger.debug(`Didn't find any versions of the CodeQL tools starting with ${cliVersion} ` +
+`in the toolcache. Trying next fallback method.`);
+}
 else {
-logger.debug("Did not find exactly one version of the CodeQL tools starting with the requested version.");
+logger.warning(`Found ${candidateVersions.length} versions of the CodeQL tools starting with ` +
+`${cliVersion} in the toolcache, but at most one was expected.`);
+logger.debug("Trying next fallback method.");
 }
 }
 }
-if (!codeqlFolder && requestedVersion.cliVersion) {
-// Fall back to accepting a `0.0.0-<tagName>` version if we didn't find the
-// `x.y.z` version. This is to support old versions of the toolcache.
-//
-// If we are on Dotcom, we will make an HTTP request to the Releases API here
-// to find the tag name for the requested version.
-tagName =
-tagName || (await getOrFindBundleTagName(requestedVersion, logger));
-const fallbackVersion = convertToSemVer(tagName, logger);
-logger.debug(`Computed a fallback toolcache version number of ${fallbackVersion} for CodeQL tools version ` +
-`${requestedVersion.cliVersion}.`);
+// Fall back to matching `0.0.0-<bundleVersion>`.
+if (!codeqlFolder && (cliVersion || tagName)) {
+if (cliVersion || tagName) {
+const fallbackVersion = await tryGetFallbackToolcacheVersion(cliVersion, tagName, variant, logger);
+if (fallbackVersion) {
 codeqlFolder = toolcache.find("CodeQL", fallbackVersion);
 }
+else {
+logger.debug("Could not determine a fallback toolcache version number for CodeQL tools version " +
+`${humanReadableVersion}.`);
+}
+}
+else {
+logger.debug("Both the CLI version and the bundle version are unknown, so we will not be able to find " +
+"the requested version of the CodeQL tools in the toolcache.");
+}
+}
+if (codeqlFolder) {
+logger.info(`Found CodeQL tools version ${humanReadableVersion} in the toolcache.`);
+}
+else {
+logger.info(`Did not find CodeQL tools version ${humanReadableVersion} in the toolcache.`);
+}
 if (codeqlFolder) {
 return {
 codeqlFolder,
 sourceType: "toolcache",
-toolsVersion: requestedVersion.syntheticCliVersion,
+toolsVersion: cliVersion ?? humanReadableVersion,
 };
 }
-logger.debug(`Did not find CodeQL tools version ${requestedVersion.syntheticCliVersion} in the toolcache.`);
 // If we don't find the requested version on Enterprise, we may allow a
 // different version to save download time if the version hasn't been
 // specified explicitly (in which case we always honor it).
-if (variant !== util.GitHubVariant.DOTCOM && !forceLatest && !toolsInput) {
-const result = await findOverridingToolsInCache(requestedVersion.syntheticCliVersion, logger);
+if (variant !== util.GitHubVariant.DOTCOM &&
+!forceShippedTools &&
+!toolsInput) {
+const result = await findOverridingToolsInCache(humanReadableVersion, logger);
 if (result !== undefined) {
 return result;
 }
 }
+if (!url) {
+if (!tagName && cliVersion && variant === util.GitHubVariant.DOTCOM) {
+tagName = await findCodeQLBundleTagDotcomOnly(cliVersion, logger);
+}
+else if (!tagName) {
+throw new Error(`Could not obtain the requested version (${humanReadableVersion}) of the CodeQL tools ` +
+"since we could not compute the tag name.");
+}
+url = await getCodeQLBundleDownloadURL(tagName, apiDetails, variant, logger);
+}
 return {
-cliVersion: requestedVersion.cliVersion || undefined,
-codeqlURL: requestedVersion["url"] ||
-(await getCodeQLBundleDownloadURL(tagName ||
-// The check on `requestedVersion.tagName` is redundant but lets us
-// use the property that if we don't know `requestedVersion.tagName`,
-// then we must know `requestedVersion.cliVersion`. This property is
-// required by the type of `getOrFindBundleTagName`.
-(requestedVersion.tagName !== undefined
-? requestedVersion.tagName
-: await getOrFindBundleTagName(requestedVersion, logger)), apiDetails, variant, logger)),
+bundleVersion: tagName && tryGetBundleVersionFromTagName(tagName, logger),
+cliVersion,
+codeqlURL: url,
 sourceType: "download",
-toolsVersion: requestedVersion.syntheticCliVersion,
+toolsVersion: cliVersion ?? humanReadableVersion,
 };
 }
 exports.getCodeQLSource = getCodeQLSource;
-async function downloadCodeQL(codeqlURL, maybeCliVersion, apiDetails, variant, tempDir, logger) {
+/**
+* Gets a fallback version number to use when looking for CodeQL in the toolcache if we didn't find
+* the `x.y.z` version. This is to support old versions of the toolcache.
+*/
+async function tryGetFallbackToolcacheVersion(cliVersion, tagName, variant, logger) {
+//
+// If we are on Dotcom, we will make an HTTP request to the Releases API here
+// to find the tag name for the requested version.
+if (cliVersion && !tagName && variant === util.GitHubVariant.DOTCOM) {
+tagName = await findCodeQLBundleTagDotcomOnly(cliVersion, logger);
+}
+if (!tagName) {
+return undefined;
+}
+const bundleVersion = tryGetBundleVersionFromTagName(tagName, logger);
+if (!bundleVersion) {
+return undefined;
+}
+const fallbackVersion = convertToSemVer(bundleVersion, logger);
+logger.debug(`Computed a fallback toolcache version number of ${fallbackVersion} for CodeQL version ` +
+`${cliVersion ?? tagName}.`);
+return fallbackVersion;
+}
+exports.tryGetFallbackToolcacheVersion = tryGetFallbackToolcacheVersion;
+async function downloadCodeQL(codeqlURL, maybeBundleVersion, maybeCliVersion, apiDetails, variant, tempDir, logger) {
 const parsedCodeQLURL = new URL(codeqlURL);
 const searchParams = new URLSearchParams(parsedCodeQLURL.search);
 const headers = {
@@ -404,12 +454,13 @@ async function downloadCodeQL(codeqlURL, maybeCliVersion, apiDetails, variant, t
 // from the same GitHub instance the Action is running on.
 // This avoids leaking Enterprise tokens to dotcom.
 // We also don't want to send an authorization header if there's already a token provided in the URL.
+let authorization = undefined;
 if (searchParams.has("token")) {
 logger.debug("CodeQL tools URL contains an authorization token.");
 }
 else if (codeqlURL.startsWith(`${apiDetails.url}/`)) {
 logger.debug("Providing an authorization token to download CodeQL tools.");
-headers.authorization = `token ${apiDetails.auth}`;
+authorization = `token ${apiDetails.auth}`;
 }
 else {
 logger.debug("Downloading CodeQL tools without an authorization token.");
@@ -418,16 +469,26 @@ async function downloadCodeQL(codeqlURL, maybeCliVersion, apiDetails, variant, t
 const dest = path.join(tempDir, (0, uuid_1.v4)());
 const finalHeaders = Object.assign({ "User-Agent": "CodeQL Action" }, headers);
 const toolsDownloadStart = perf_hooks_1.performance.now();
-const codeqlPath = await toolcache.downloadTool(codeqlURL, dest, undefined, finalHeaders);
-const toolsDownloadDurationMs = perf_hooks_1.performance.now() - toolsDownloadStart;
+const codeqlPath = await toolcache.downloadTool(codeqlURL, dest, authorization, finalHeaders);
+const toolsDownloadDurationMs = Math.round(perf_hooks_1.performance.now() - toolsDownloadStart);
 logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
 const codeqlExtracted = await toolcache.extractTar(codeqlPath);
-const bundleVersion = getBundleVersionFromUrl(codeqlURL);
+const bundleVersion = maybeBundleVersion ?? tryGetBundleVersionFromUrl(codeqlURL, logger);
+if (bundleVersion === undefined) {
+logger.debug("Could not cache CodeQL tools because we could not determine the bundle version from the " +
+`URL ${codeqlURL}.`);
+return {
+toolsVersion: maybeCliVersion ?? "unknown",
+codeqlFolder: codeqlExtracted,
+toolsDownloadDurationMs,
+};
+}
 // Try to compute the CLI version for this bundle
-const cliVersion = maybeCliVersion ||
-(variant === util.GitHubVariant.DOTCOM &&
-(await tryFindCliVersionDotcomOnly(`codeql-bundle-${bundleVersion}`, logger))) ||
-undefined;
+if (maybeCliVersion === undefined &&
+variant === util.GitHubVariant.DOTCOM &&
+codeqlURL.includes(`/${exports.CODEQL_DEFAULT_ACTION_REPOSITORY}/`)) {
+maybeCliVersion = await tryFindCliVersionDotcomOnly(`codeql-bundle-${bundleVersion}`, logger);
+}
 // Include both the CLI version and the bundle version in the toolcache version number. That way
 // if the user requests the same URL again, we can get it from the cache without having to call
 // any of the Releases API.
@@ -437,11 +498,11 @@ async function downloadCodeQL(codeqlURL, maybeCliVersion, apiDetails, variant, t
 // CLI release. In principle, it should be enough to just check that the CLI version isn't a
 // pre-release, but the version numbers of CodeQL nightlies have the format `x.y.z+<timestamp>`,
 // and we don't want these nightlies to override stable CLI versions in the toolcache.
-const toolcacheVersion = cliVersion && cliVersion.match(/^[0-9]+\.[0-9]+\.[0-9]+$/)
-? `${cliVersion}-${bundleVersion}`
+const toolcacheVersion = maybeCliVersion?.match(/^[0-9]+\.[0-9]+\.[0-9]+$/)
+? `${maybeCliVersion}-${bundleVersion}`
 : convertToSemVer(bundleVersion, logger);
 return {
-toolsVersion: cliVersion || toolcacheVersion,
+toolsVersion: maybeCliVersion ?? toolcacheVersion,
 codeqlFolder: await toolcache.cacheDir(codeqlExtracted, "CodeQL", toolcacheVersion),
 toolsDownloadDurationMs,
 };
@@ -462,15 +523,14 @@ exports.getCodeQLURLVersion = getCodeQLURLVersion;
 * @param apiDetails
 * @param tempDir
 * @param variant
-* @param bypassToolcache
 * @param defaultCliVersion
 * @param logger
 * @param checkVersion Whether to check that CodeQL CLI meets the minimum
 * version requirement. Must be set to true outside tests.
 * @returns the path to the extracted bundle, and the version of the tools
 */
-async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, bypassToolcache, defaultCliVersion, logger) {
-const source = await getCodeQLSource(toolsInput, bypassToolcache, defaultCliVersion, apiDetails, variant, logger);
+async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger) {
+const source = await getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, logger);
 let codeqlFolder;
 let toolsVersion = source.toolsVersion;
 let toolsDownloadDurationMs;
@@ -486,7 +546,7 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, bypas
 toolsSource = init_1.ToolsSource.Toolcache;
 break;
 case "download": {
-const result = await downloadCodeQL(source.codeqlURL, source.cliVersion, apiDetails, variant, tempDir, logger);
+const result = await downloadCodeQL(source.codeqlURL, source.bundleVersion, source.cliVersion, apiDetails, variant, tempDir, logger);
 toolsVersion = result.toolsVersion;
 codeqlFolder = result.codeqlFolder;
 toolsDownloadDurationMs = result.toolsDownloadDurationMs;
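The new tryGetTagNameFromUrl / tryGetBundleVersionFromTagName helpers above replace the old throw-on-malformed-URL behaviour with a debug message and an undefined result. The two regexes, shown standalone (the example URL below is illustrative, not taken from the diff):

// First pull the `codeql-bundle-*` tag out of a bundle download URL, then
// strip the prefix to recover the bundle version.
function tagNameFromUrl(url: string): string | undefined {
  const match = url.match(/\/(codeql-bundle-.*)\//);
  return match === null || match.length < 2 ? undefined : match[1];
}

function bundleVersionFromTagName(tagName: string): string | undefined {
  const match = tagName.match(/^codeql-bundle-(.*)$/);
  return match === null || match.length < 2 ? undefined : match[1];
}

// Assumed example:
//   tagNameFromUrl("https://example.com/releases/download/codeql-bundle-20230120/codeql-bundle.tar.gz")
//     returns "codeql-bundle-20230120"
//   bundleVersionFromTagName("codeql-bundle-20230120") returns "20230120"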
File diff suppressed because one or more lines are too long

7 lib/upload-lib.js generated
@@ -293,7 +293,8 @@ async function waitForProcessing(repositoryNwo, sarifID, logger, options = {
 if (Date.now() >
 statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
 // If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
-// It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
+// It's possible the analysis will eventually finish processing, but it's not worth spending more
+// Actions time waiting.
 logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
 break;
 }
@@ -329,7 +330,9 @@ async function waitForProcessing(repositoryNwo, sarifID, logger, options = {
 else {
 util.assertNever(status);
 }
-await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
+await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS, {
+allowProcessExit: false,
+});
 }
 }
 finally {
File diff suppressed because one or more lines are too long

67 lib/util.js generated
@@ -26,7 +26,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.parseMatrixInput = exports.shouldBypassToolcache = exports.isHostedRunner = exports.checkForTimeout = exports.withTimeout = exports.tryGetFolderBytes = exports.listFolder = exports.doesDirectoryExist = exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.isInTestMode = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.enrichEnvironment = exports.initializeEnvironment = exports.EnvVar = exports.assertNever = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DID_AUTOBUILD_GO_ENV_VAR_NAME = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
+exports.parseMatrixInput = exports.isHostedRunner = exports.checkForTimeout = exports.withTimeout = exports.tryGetFolderBytes = exports.listFolder = exports.doesDirectoryExist = exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.isInTestMode = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.supportExpectDiscardedCache = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.enrichEnvironment = exports.initializeEnvironment = exports.EnvVar = exports.assertNever = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DID_AUTOBUILD_GO_ENV_VAR_NAME = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
 const fs = __importStar(require("fs"));
 const os = __importStar(require("os"));
 const path = __importStar(require("path"));
@@ -40,7 +40,6 @@ const apiCompatibility = __importStar(require("./api-compatibility.json"));
 const codeql_1 = require("./codeql");
 const config_utils_1 = require("./config-utils");
 const feature_flags_1 = require("./feature-flags");
-const languages_1 = require("./languages");
 const shared_environment_1 = require("./shared-environment");
 /**
 * Specifies bundle versions that are known to be broken
@@ -456,16 +455,33 @@ async function bundleDb(config, language, codeql, dbName) {
 return databaseBundlePath;
 }
 exports.bundleDb = bundleDb;
-async function delay(milliseconds) {
+/**
+* @param milliseconds time to delay
+* @param opts options
+* @param opts.allowProcessExit if true, the timer will not prevent the process from exiting
+*/
+async function delay(milliseconds, { allowProcessExit }) {
+return new Promise((resolve) => {
+const timer = setTimeout(resolve, milliseconds);
+if (allowProcessExit) {
 // Immediately `unref` the timer such that it only prevents the process from exiting if the
 // surrounding promise is being awaited.
-return new Promise((resolve) => setTimeout(resolve, milliseconds).unref());
+timer.unref();
+}
+});
 }
 exports.delay = delay;
 function isGoodVersion(versionSpec) {
 return !BROKEN_VERSIONS.includes(versionSpec);
 }
 exports.isGoodVersion = isGoodVersion;
+/**
+* Checks whether the CodeQL CLI supports the `--expect-discarded-cache` command-line flag.
+*/
+async function supportExpectDiscardedCache(codeQL) {
+return codeQlVersionAbove(codeQL, "2.12.1");
+}
+exports.supportExpectDiscardedCache = supportExpectDiscardedCache;
 exports.ML_POWERED_JS_QUERIES_PACK_NAME = "codeql/javascript-experimental-atm-queries";
 /**
 * Gets the ML-powered JS query pack to add to the analysis if a repo is opted into the ML-powered
@@ -637,7 +653,7 @@ async function withTimeout(timeoutMs, promise, onTimeout) {
 return result;
 };
 const timeoutTask = async () => {
-await delay(timeoutMs);
+await delay(timeoutMs, { allowProcessExit: true });
 if (!finished) {
 // Workaround: While the promise racing below will allow the main code
 // to continue, the process won't normally exit until the asynchronous
@@ -660,7 +676,7 @@ exports.withTimeout = withTimeout;
 async function checkForTimeout() {
 if (hadTimeout === true) {
 core.info("A timeout occurred, force exiting the process after 30 seconds to prevent hanging.");
-await delay(30000);
+await delay(30000, { allowProcessExit: true });
 process.exit();
 }
 }
@@ -685,45 +701,6 @@ function isHostedRunner() {
 process.env["RUNNER_TOOL_CACHE"]?.includes("hostedtoolcache"));
 }
 exports.isHostedRunner = isHostedRunner;
-/**
-*
-* @param featuresEnablement The features enabled for the current run
-* @param languagesInput Languages input from the workflow
-* @param repository The owner/name of the repository
-* @param logger A logger
-* @returns A boolean indicating whether or not the toolcache should be bypassed and the latest codeql should be downloaded.
-*/
-async function shouldBypassToolcache(featuresEnablement, codeqlUrl, languagesInput, repository, logger) {
-// An explicit codeql url is specified, that means the toolcache will not be used.
-if (codeqlUrl) {
-return true;
-}
-// Check if the toolcache is disabled for all languages
-if (await featuresEnablement.getValue(feature_flags_1.Feature.BypassToolcacheEnabled)) {
-return true;
-}
-// Check if the toolcache is disabled for kotlin and swift.
-if (!(await featuresEnablement.getValue(feature_flags_1.Feature.BypassToolcacheKotlinSwiftEnabled))) {
-return false;
-}
-// Now check to see if kotlin or swift is one of the languages being analyzed.
-const { rawLanguages, autodetected } = await (0, config_utils_1.getRawLanguages)(languagesInput, repository, logger);
-let bypass = rawLanguages.some((lang) => languages_1.KOTLIN_SWIFT_BYPASS.includes(lang));
-if (bypass) {
-logger.info(`Bypassing toolcache for kotlin or swift. Languages: ${rawLanguages}`);
-}
-else if (!autodetected && rawLanguages.includes(languages_1.Language.java)) {
-// special case: java was explicitly specified, but there might be
-// some kotlin in the repository, so we need to make a request for that.
-const langsInRepo = await (0, config_utils_1.getLanguagesInRepo)(repository, logger);
-if (langsInRepo.includes("kotlin")) {
-logger.info(`Bypassing toolcache for kotlin.`);
-bypass = true;
-}
-}
-return bypass;
-}
-exports.shouldBypassToolcache = shouldBypassToolcache;
 function parseMatrixInput(matrixInput) {
 if (matrixInput === undefined || matrixInput === "null") {
 return undefined;
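The reworked delay() above only unref()s its timer when the caller passes allowProcessExit: true, so a delay used for status polling keeps the Node process alive while a watchdog-style delay does not. The same function in isolation, with the two call styles from the diff:

// A pending delay with allowProcessExit: false keeps the event loop alive;
// with allowProcessExit: true the timer is unref()ed and will not block exit.
function delay(milliseconds: number, opts: { allowProcessExit: boolean }): Promise<void> {
  return new Promise((resolve) => {
    const timer = setTimeout(resolve, milliseconds);
    if (opts.allowProcessExit) {
      timer.unref();
    }
  });
}

// Usage, mirroring the call sites in lib/util.js and lib/upload-lib.js:
//   await delay(30000, { allowProcessExit: true });
//   await delay(STATUS_CHECK_FREQUENCY_MILLISECONDS, { allowProcessExit: false });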
File diff suppressed because one or more lines are too long

115 lib/util.test.js generated
@@ -33,9 +33,7 @@ const github = __importStar(require("@actions/github"));
|
|||||||
const ava_1 = __importDefault(require("ava"));
|
const ava_1 = __importDefault(require("ava"));
|
||||||
const sinon = __importStar(require("sinon"));
|
const sinon = __importStar(require("sinon"));
|
||||||
const api = __importStar(require("./api-client"));
|
const api = __importStar(require("./api-client"));
|
||||||
const feature_flags_1 = require("./feature-flags");
|
|
||||||
const logging_1 = require("./logging");
|
const logging_1 = require("./logging");
|
||||||
const repository_1 = require("./repository");
|
|
||||||
const testing_utils_1 = require("./testing-utils");
|
const testing_utils_1 = require("./testing-utils");
|
||||||
const util = __importStar(require("./util"));
|
const util = __importStar(require("./util"));
|
||||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||||
@@ -325,117 +323,4 @@ const shortTime = 10;
     t.deepEqual(shortTaskTimedOut, false);
     t.deepEqual(result, 99);
 });
-const mockRepositoryNwo = (0, repository_1.parseRepositoryNwo)("owner/repo");
-// eslint-disable-next-line github/array-foreach
-[
-    {
-        name: "disabled",
-        features: [],
-        hasCustomCodeQL: false,
-        languagesInput: undefined,
-        languagesInRepository: [],
-        expected: false,
-        expectedApiCall: false,
-    },
-    {
-        name: "disabled even though swift kotlin bypassed",
-        features: [feature_flags_1.Feature.BypassToolcacheKotlinSwiftEnabled],
-        hasCustomCodeQL: false,
-        languagesInput: undefined,
-        languagesInRepository: [],
-        expected: false,
-        expectedApiCall: true,
-    },
-    {
-        name: "disabled even though swift kotlin analyzed",
-        features: [],
-        hasCustomCodeQL: false,
-        languagesInput: " sWiFt , KoTlIn ",
-        languagesInRepository: [],
-        expected: false,
-        expectedApiCall: false,
-    },
-    {
-        name: "toolcache bypass all",
-        features: [feature_flags_1.Feature.BypassToolcacheEnabled],
-        hasCustomCodeQL: false,
-        languagesInput: undefined,
-        languagesInRepository: [],
-        expected: true,
-        expectedApiCall: false,
-    },
-    {
-        name: "custom CodeQL",
-        features: [],
-        hasCustomCodeQL: true,
-        languagesInput: undefined,
-        languagesInRepository: [],
-        expected: true,
-        expectedApiCall: false,
-    },
-    {
-        name: "bypass swift",
-        features: [feature_flags_1.Feature.BypassToolcacheKotlinSwiftEnabled],
-        hasCustomCodeQL: false,
-        languagesInput: " sWiFt ,other",
-        languagesInRepository: [],
-        expected: true,
-        expectedApiCall: false,
-    },
-    {
-        name: "bypass kotlin",
-        features: [feature_flags_1.Feature.BypassToolcacheKotlinSwiftEnabled],
-        hasCustomCodeQL: false,
-        languagesInput: "other, KoTlIn ",
-        languagesInRepository: [],
-        expected: true,
-        expectedApiCall: false,
-    },
-    {
-        name: "bypass kotlin language from repository",
-        features: [feature_flags_1.Feature.BypassToolcacheKotlinSwiftEnabled],
-        hasCustomCodeQL: false,
-        languagesInput: "",
-        languagesInRepository: ["KoTlIn", "other"],
-        expected: true,
-        expectedApiCall: true,
-    },
-    {
-        name: "bypass swift language from repository",
-        features: [feature_flags_1.Feature.BypassToolcacheKotlinSwiftEnabled],
-        hasCustomCodeQL: false,
-        languagesInput: "",
-        languagesInRepository: ["SwiFt", "other"],
-        expected: true,
-        expectedApiCall: true,
-    },
-    {
-        name: "bypass java from input if there is kotlin in repository",
-        features: [feature_flags_1.Feature.BypassToolcacheKotlinSwiftEnabled],
-        hasCustomCodeQL: false,
-        languagesInput: "java",
-        languagesInRepository: ["kotlin", "other"],
-        expected: true,
-        expectedApiCall: true,
-    },
-    {
-        name: "don't bypass java from input if there is no kotlin in repository",
-        features: [feature_flags_1.Feature.BypassToolcacheKotlinSwiftEnabled],
-        hasCustomCodeQL: false,
-        languagesInput: "java",
-        languagesInRepository: ["java", "other"],
-        expected: false,
-        expectedApiCall: true,
-    },
-].forEach((args) => {
-    (0, ava_1.default)(`shouldBypassToolcache: ${args.name}`, async (t) => {
-        const mockRequest = (0, testing_utils_1.mockLanguagesInRepo)(args.languagesInRepository);
-        const mockLogger = (0, logging_1.getRunnerLogger)(true);
-        const featureEnablement = (0, testing_utils_1.createFeatures)(args.features);
-        const codeqlUrl = args.hasCustomCodeQL ? "custom-codeql-url" : undefined;
-        const actual = await util.shouldBypassToolcache(featureEnablement, codeqlUrl, args.languagesInput, mockRepositoryNwo, mockLogger);
-        t.deepEqual(actual, args.expected);
-        t.deepEqual(mockRequest.called, args.expectedApiCall);
-    });
-});
 //# sourceMappingURL=util.test.js.map
File diff suppressed because one or more lines are too long
58  node_modules/.package-lock.json  generated  vendored
@@ -1,6 +1,6 @@
 {
   "name": "codeql",
-  "version": "2.2.0",
+  "version": "2.2.6",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
@@ -3354,19 +3354,17 @@
       }
     },
     "node_modules/glob": {
-      "version": "8.0.1",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-8.0.1.tgz",
-      "integrity": "sha512-cF7FYZZ47YzmCu7dDy50xSRRfO3ErRfrXuLZcNIuyiJEco0XSrGtuilG19L5xp3NcwTx7Gn+X6Tv3fmsUPTbow==",
+      "version": "9.2.1",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-9.2.1.tgz",
+      "integrity": "sha512-Pxxgq3W0HyA3XUvSXcFhRSs+43Jsx0ddxcFrbjxNGkL2Ak5BAUBxLqI5G6ADDeCHLfzzXFhe0b1yYcctGmytMA==",
       "dependencies": {
         "fs.realpath": "^1.0.0",
-        "inflight": "^1.0.4",
-        "inherits": "2",
-        "minimatch": "^5.0.1",
-        "once": "^1.3.0",
-        "path-is-absolute": "^1.0.0"
+        "minimatch": "^7.4.1",
+        "minipass": "^4.2.4",
+        "path-scurry": "^1.6.1"
       },
       "engines": {
-        "node": ">=12"
+        "node": ">=16 || 14 >=14.17"
       },
       "funding": {
         "url": "https://github.com/sponsors/isaacs"
@@ -3392,14 +3390,17 @@
       }
     },
     "node_modules/glob/node_modules/minimatch": {
-      "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz",
-      "integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==",
+      "version": "7.4.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-7.4.2.tgz",
+      "integrity": "sha512-xy4q7wou3vUoC9k1xGTXc+awNdGaGVHtFUaey8tiX4H1QRc04DZ/rmDFwNm2EBsuYEhAZ6SgMmYf3InGY6OauA==",
       "dependencies": {
         "brace-expansion": "^2.0.1"
       },
       "engines": {
         "node": ">=10"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
       }
     },
     "node_modules/globals": {
@@ -4425,6 +4426,14 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
+    "node_modules/minipass": {
+      "version": "4.2.4",
+      "resolved": "https://registry.npmjs.org/minipass/-/minipass-4.2.4.tgz",
+      "integrity": "sha512-lwycX3cBMTvcejsHITUgYj6Gy6A7Nh4Q6h9NP4sTHY1ccJlC7yKzDmiShEHsJ16Jf1nKGDEaiHxiltsJEvk0nQ==",
+      "engines": {
+        "node": ">=8"
+      }
+    },
     "node_modules/ms": {
       "version": "2.1.2",
       "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
@@ -4839,6 +4848,29 @@
       "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
       "dev": true
     },
+    "node_modules/path-scurry": {
+      "version": "1.6.1",
+      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.6.1.tgz",
+      "integrity": "sha512-OW+5s+7cw6253Q4E+8qQ/u1fVvcJQCJo/VFD8pje+dbJCF1n5ZRMV2AEHbGp+5Q7jxQIYJxkHopnj6nzdGeZLA==",
+      "dependencies": {
+        "lru-cache": "^7.14.1",
+        "minipass": "^4.0.2"
+      },
+      "engines": {
+        "node": ">=14"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/path-scurry/node_modules/lru-cache": {
+      "version": "7.18.3",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
+      "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
+      "engines": {
+        "node": ">=12"
+      }
+    },
     "node_modules/path-to-regexp": {
       "version": "1.8.0",
       "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz",
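The lockfile hunks above record glob's jump from 8.0.1 to 9.2.1: the old inflight/inherits/once/path-is-absolute dependencies disappear, and minimatch 7, minipass, and path-scurry take their place. The visible API consequence is that glob v9 drops the callback form in favor of promise and sync entry points; a short sketch assuming the published glob v8/v9 packages rather than anything in this repository:

// glob v8 and earlier (callback style, shown for contrast):
//   glob("**/*.js", { ignore: "node_modules/**" }, (err, files) => { /* ... */ });

// glob v9: promise-based and synchronous named entry points.
const { glob, globSync } = require("glob");

async function listJsFiles() {
    // Resolves to string[]; ignore patterns are always matched in dot:true mode.
    return glob("**/*.js", { ignore: "node_modules/**", nodir: true });
}

// Synchronous equivalent.
const filesNow = globSync("**/*.js", { ignore: "node_modules/**", nodir: true });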
2  node_modules/glob/LICENSE  generated  vendored
@@ -1,6 +1,6 @@
 The ISC License
 
-Copyright (c) 2009-2022 Isaac Z. Schlueter and Contributors
+Copyright (c) 2009-2023 Isaac Z. Schlueter and Contributors
 
 Permission to use, copy, modify, and/or distribute this software for any
 purpose with or without fee is hereby granted, provided that the above
1272  node_modules/glob/README.md  generated  vendored
File diff suppressed because it is too large
238  node_modules/glob/common.js  generated  vendored
@@ -1,238 +0,0 @@
(deleted vendored file: the glob v8 helper module exporting setopts, ownProp, makeAbs, finish, mark, isIgnored and childrenIgnored, removed by the upgrade to glob v9; full contents omitted)
328  node_modules/glob/dist/cjs/glob.d.ts  generated  vendored  Normal file
@@ -0,0 +1,328 @@
(new vendored file: TypeScript declarations for glob v9's GlobOptions interface and Glob class, including walk/walkSync, stream/streamSync, iterate/iterateSync and the Symbol.iterator/Symbol.asyncIterator methods; full contents omitted)
1  node_modules/glob/dist/cjs/glob.d.ts.map  generated  vendored  Normal file
@@ -0,0 +1 @@
(single-line source map contents omitted)
228  node_modules/glob/dist/cjs/glob.js  generated  vendored  Normal file
@@ -0,0 +1,228 @@
(new vendored file: compiled CommonJS implementation of glob v9's Glob class: constructor option handling, per-platform PathScurry selection, and walk/walkSync/stream/streamSync/iterate wrappers over GlobWalker and GlobStream; full contents omitted)
1  node_modules/glob/dist/cjs/glob.js.map  generated  vendored  Normal file
File diff suppressed because one or more lines are too long
14  node_modules/glob/dist/cjs/has-magic.d.ts  generated  vendored  Normal file
@@ -0,0 +1,14 @@
import { GlobOptions } from './glob.js';
/**
 * Return true if the patterns provided contain any magic glob characters,
 * given the options provided.
 *
 * Brace expansion is not considered "magic" unless the `magicalBraces` option
 * is set, as brace expansion just turns one string into an array of strings.
 * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
 * `'xby'` both do not contain any magic glob characters, and it's treated the
 * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
 * is in the options, brace expansion _is_ treated as a pattern having magic.
 */
export declare const hasMagic: (pattern: string | string[], options?: GlobOptions) => boolean;
//# sourceMappingURL=has-magic.d.ts.map
1  node_modules/glob/dist/cjs/has-magic.d.ts.map  generated  vendored  Normal file
@@ -0,0 +1 @@
(single-line source map contents omitted)
27  node_modules/glob/dist/cjs/has-magic.js  generated  vendored  Normal file
@@ -0,0 +1,27 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.hasMagic = void 0;
const minimatch_1 = require("minimatch");
/**
 * Return true if the patterns provided contain any magic glob characters,
 * given the options provided.
 *
 * Brace expansion is not considered "magic" unless the `magicalBraces` option
 * is set, as brace expansion just turns one string into an array of strings.
 * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
 * `'xby'` both do not contain any magic glob characters, and it's treated the
 * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
 * is in the options, brace expansion _is_ treated as a pattern having magic.
 */
const hasMagic = (pattern, options = {}) => {
    if (!Array.isArray(pattern)) {
        pattern = [pattern];
    }
    for (const p of pattern) {
        if (new minimatch_1.Minimatch(p, options).hasMagic())
            return true;
    }
    return false;
};
exports.hasMagic = hasMagic;
//# sourceMappingURL=has-magic.js.map
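A short usage sketch for the hasMagic helper added above (illustrative, not part of the diff):

const { hasMagic } = require("glob");

// "*" is a magic glob character.
hasMagic("src/*.js");                         // true

// Brace expansion alone is not treated as magic...
hasMagic("x{a,b}y");                          // false
// ...unless magicalBraces is set, as the JSDoc above describes.
hasMagic("x{a,b}y", { magicalBraces: true }); // true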
1  node_modules/glob/dist/cjs/has-magic.js.map  generated  vendored  Normal file
@@ -0,0 +1 @@
(single-line source map contents omitted)
20  node_modules/glob/dist/cjs/ignore.d.ts  generated  vendored  Normal file
@@ -0,0 +1,20 @@
import { Minimatch } from 'minimatch';
import { Path } from 'path-scurry';
import { GlobWalkerOpts } from './walker.js';
export interface IgnoreLike {
    ignored?: (p: Path) => boolean;
    childrenIgnored?: (p: Path) => boolean;
}
/**
 * Class used to process ignored patterns
 */
export declare class Ignore implements IgnoreLike {
    relative: Minimatch[];
    relativeChildren: Minimatch[];
    absolute: Minimatch[];
    absoluteChildren: Minimatch[];
    constructor(ignored: string[], { nobrace, nocase, noext, noglobstar, platform, }: GlobWalkerOpts);
    ignored(p: Path): boolean;
    childrenIgnored(p: Path): boolean;
}
//# sourceMappingURL=ignore.d.ts.map
1  node_modules/glob/dist/cjs/ignore.d.ts.map  generated  vendored  Normal file
@@ -0,0 +1 @@
(single-line source map contents omitted)
103  node_modules/glob/dist/cjs/ignore.js  generated  vendored  Normal file
@@ -0,0 +1,103 @@
"use strict";
// give it a pattern, and it'll be able to tell you if
// a given path should be ignored.
// Ignoring a path ignores its children if the pattern ends in /**
// Ignores are always parsed in dot:true mode
Object.defineProperty(exports, "__esModule", { value: true });
exports.Ignore = void 0;
const minimatch_1 = require("minimatch");
const pattern_js_1 = require("./pattern.js");
const defaultPlatform = typeof process === 'object' &&
    process &&
    typeof process.platform === 'string'
    ? process.platform
    : 'linux';
/**
 * Class used to process ignored patterns
 */
class Ignore {
    relative;
    relativeChildren;
    absolute;
    absoluteChildren;
    constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) {
        this.relative = [];
        this.absolute = [];
        this.relativeChildren = [];
        this.absoluteChildren = [];
        const mmopts = {
            dot: true,
            nobrace,
            nocase,
            noext,
            noglobstar,
            optimizationLevel: 2,
            platform,
            nocomment: true,
            nonegate: true,
        };
        // this is a little weird, but it gives us a clean set of optimized
        // minimatch matchers, without getting tripped up if one of them
        // ends in /** inside a brace section, and it's only inefficient at
        // the start of the walk, not along it.
        // It'd be nice if the Pattern class just had a .test() method, but
        // handling globstars is a bit of a pita, and that code already lives
        // in minimatch anyway.
        // Another way would be if maybe Minimatch could take its set/globParts
        // as an option, and then we could at least just use Pattern to test
        // for absolute-ness.
        // Yet another way, Minimatch could take an array of glob strings, and
        // a cwd option, and do the right thing.
        for (const ign of ignored) {
            const mm = new minimatch_1.Minimatch(ign, mmopts);
            for (let i = 0; i < mm.set.length; i++) {
                const parsed = mm.set[i];
                const globParts = mm.globParts[i];
                const p = new pattern_js_1.Pattern(parsed, globParts, 0, platform);
                const m = new minimatch_1.Minimatch(p.globString(), mmopts);
                const children = globParts[globParts.length - 1] === '**';
                const absolute = p.isAbsolute();
                if (absolute)
                    this.absolute.push(m);
                else
                    this.relative.push(m);
                if (children) {
                    if (absolute)
                        this.absoluteChildren.push(m);
                    else
                        this.relativeChildren.push(m);
                }
            }
        }
    }
    ignored(p) {
        const fullpath = p.fullpath();
        const fullpaths = `${fullpath}/`;
        const relative = p.relative() || '.';
        const relatives = `${relative}/`;
        for (const m of this.relative) {
            if (m.match(relative) || m.match(relatives))
                return true;
        }
        for (const m of this.absolute) {
            if (m.match(fullpath) || m.match(fullpaths))
                return true;
        }
        return false;
    }
    childrenIgnored(p) {
        const fullpath = p.fullpath() + '/';
        const relative = (p.relative() || '.') + '/';
        for (const m of this.relativeChildren) {
            if (m.match(relative))
                return true;
        }
        for (const m of this.absoluteChildren) {
            if (m.match(fullpath))
                true;
        }
        return false;
    }
}
exports.Ignore = Ignore;
//# sourceMappingURL=ignore.js.map
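Two notes on the Ignore implementation above. First, each ignore pattern is compiled into minimatch matchers bucketed by absolute versus relative, and by whether it ends in /**, which is what lets a node_modules/** ignore skip a directory's children as well as the directory itself. Second, as vendored here, the absoluteChildren loop in childrenIgnored reads "if (m.match(fullpath)) true;" with no return, so that branch never actually short-circuits. Consumers normally reach this machinery through glob's ignore option; a hedged sketch assuming the published glob v9 API, not anything in this repository:

const { globSync } = require("glob");

// String patterns: always matched dot:true; a trailing /** ignores children too.
const sources = globSync("**/*.js", {
    ignore: ["node_modules/**", "**/*.test.js"],
});

// An IgnoreLike object with ignored()/childrenIgnored() methods also works;
// the callbacks receive path-scurry Path objects (fullpath()/relative(), as above).
const custom = {
    ignored: (p) => p.fullpath().endsWith(".min.js"),
    childrenIgnored: (p) => p.relative() === "dist",
};
const unminified = globSync("src/**/*.js", { ignore: custom });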
1  node_modules/glob/dist/cjs/ignore.js.map  generated  vendored  Normal file
@@ -0,0 +1 @@
(single-line source map contents omitted)
39
node_modules/glob/dist/cjs/index-cjs.d.ts
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
declare const _default: typeof import("./index.js").glob & {
|
||||||
|
glob: typeof import("./index.js").glob;
|
||||||
|
globSync: typeof import("./index.js").globSync;
|
||||||
|
globStream: typeof import("./index.js").globStream;
|
||||||
|
globStreamSync: typeof import("./index.js").globStreamSync;
|
||||||
|
globIterate: typeof import("./index.js").globIterate;
|
||||||
|
globIterateSync: typeof import("./index.js").globIterateSync;
|
||||||
|
Glob: typeof import("./glob.js").Glob;
|
||||||
|
hasMagic: (pattern: string | string[], options?: import("./glob.js").GlobOptions) => boolean;
|
||||||
|
escape: (s: string, { windowsPathsNoEscape, }?: Pick<import("minimatch").MinimatchOptions, "windowsPathsNoEscape"> | undefined) => string;
|
||||||
|
unescape: (s: string, { windowsPathsNoEscape, }?: Pick<import("minimatch").MinimatchOptions, "windowsPathsNoEscape"> | undefined) => string;
|
||||||
|
} & {
|
||||||
|
default: typeof import("./index.js").glob & {
|
||||||
|
glob: typeof import("./index.js").glob;
|
||||||
|
globSync: typeof import("./index.js").globSync;
|
||||||
|
globStream: typeof import("./index.js").globStream;
|
||||||
|
globStreamSync: typeof import("./index.js").globStreamSync;
|
||||||
|
globIterate: typeof import("./index.js").globIterate;
|
||||||
|
globIterateSync: typeof import("./index.js").globIterateSync;
|
||||||
|
Glob: typeof import("./glob.js").Glob;
|
||||||
|
hasMagic: (pattern: string | string[], options?: import("./glob.js").GlobOptions) => boolean;
|
||||||
|
escape: (s: string, { windowsPathsNoEscape, }?: Pick<import("minimatch").MinimatchOptions, "windowsPathsNoEscape"> | undefined) => string;
|
||||||
|
unescape: (s: string, { windowsPathsNoEscape, }?: Pick<import("minimatch").MinimatchOptions, "windowsPathsNoEscape"> | undefined) => string;
|
||||||
|
};
|
||||||
|
glob: typeof import("./index.js").glob & {
|
||||||
|
glob: typeof import("./index.js").glob;
|
||||||
|
globSync: typeof import("./index.js").globSync;
|
||||||
|
globStream: typeof import("./index.js").globStream;
|
||||||
|
globStreamSync: typeof import("./index.js").globStreamSync;
|
||||||
|
globIterate: typeof import("./index.js").globIterate;
|
||||||
|
globIterateSync: typeof import("./index.js").globIterateSync;
|
||||||
|
Glob: typeof import("./glob.js").Glob;
|
||||||
|
hasMagic: (pattern: string | string[], options?: import("./glob.js").GlobOptions) => boolean;
|
||||||
|
escape: (s: string, { windowsPathsNoEscape, }?: Pick<import("minimatch").MinimatchOptions, "windowsPathsNoEscape"> | undefined) => string;
|
||||||
|
unescape: (s: string, { windowsPathsNoEscape, }?: Pick<import("minimatch").MinimatchOptions, "windowsPathsNoEscape"> | undefined) => string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
export = _default;
|
||||||
|
//# sourceMappingURL=index-cjs.d.ts.map
|
||||||
1
node_modules/glob/dist/cjs/index-cjs.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"index-cjs.d.ts","sourceRoot":"","sources":["../../src/index-cjs.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAEA,kBAAqD"}
|
||||||
7
node_modules/glob/dist/cjs/index-cjs.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
const index_js_1 = __importDefault(require("./index.js"));
module.exports = Object.assign(index_js_1.default, { default: index_js_1.default, glob: index_js_1.default });
//# sourceMappingURL=index-cjs.js.map
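The shim above makes the CommonJS entry point itself callable while also exposing `default` and `glob` properties, so `require`-style consumers keep working. A minimal sketch of that shape (the module specifier `'glob'` and the pattern are illustrative only):

// TypeScript's CommonJS-style import matches the `export =` in index-cjs.d.ts.
import glob = require('glob');

async function main(): Promise<void> {
  const direct = await glob('**/*.md');        // the module is the glob() function
  const viaProp = await glob.glob('**/*.md');  // the same function, attached as a property
  console.log(direct.length === viaProp.length);
}

main();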
1
node_modules/glob/dist/cjs/index-cjs.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"index-cjs.js","sourceRoot":"","sources":["../../src/index-cjs.ts"],"names":[],"mappings":";;;;AAAA,0DAA6B;AAE7B,iBAAS,MAAM,CAAC,MAAM,CAAC,kBAAI,EAAE,EAAE,OAAO,EAAE,kBAAI,EAAE,IAAI,EAAJ,kBAAI,EAAE,CAAC,CAAA"}
|
||||||
72
node_modules/glob/dist/cjs/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
import Minipass from 'minipass';
|
||||||
|
import { Path } from 'path-scurry';
|
||||||
|
import type { GlobOptions, GlobOptionsWithFileTypesFalse, GlobOptionsWithFileTypesTrue, GlobOptionsWithFileTypesUnset } from './glob.js';
|
||||||
|
import { Glob } from './glob.js';
|
||||||
|
/**
|
||||||
|
* Synchronous form of {@link globStream}. Will read all the matches as fast as
|
||||||
|
* you consume them, even all in a single tick if you consume them immediately,
|
||||||
|
* but will still respond to backpressure if they're not consumed immediately.
|
||||||
|
*/
|
||||||
|
export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Minipass<Path, Path>;
|
||||||
|
export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Minipass<string, string>;
|
||||||
|
export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesUnset): Minipass<string, string>;
|
||||||
|
export declare function globStreamSync(pattern: string | string[], options: GlobOptions): Minipass<Path, Path> | Minipass<string, string>;
|
||||||
|
/**
|
||||||
|
* Return a stream that emits all the strings or `Path` objects and
|
||||||
|
* then emits `end` when completed.
|
||||||
|
*/
|
||||||
|
export declare function globStream(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Minipass<string, string>;
|
||||||
|
export declare function globStream(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Minipass<Path, Path>;
|
||||||
|
export declare function globStream(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Minipass<string, string>;
|
||||||
|
export declare function globStream(pattern: string | string[], options: GlobOptions): Minipass<Path, Path> | Minipass<string, string>;
|
||||||
|
/**
|
||||||
|
* Synchronous form of {@link glob}
|
||||||
|
*/
|
||||||
|
export declare function globSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): string[];
|
||||||
|
export declare function globSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Path[];
|
||||||
|
export declare function globSync(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): string[];
|
||||||
|
export declare function globSync(pattern: string | string[], options: GlobOptions): Path[] | string[];
|
||||||
|
/**
|
||||||
|
* Perform an asynchronous glob search for the pattern(s) specified. Returns
|
||||||
|
* [Path](https://isaacs.github.io/path-scurry/classes/PathBase) objects if the
|
||||||
|
* {@link withFileTypes} option is set to `true`. See {@link GlobOptions} for
|
||||||
|
* full option descriptions.
|
||||||
|
*/
|
||||||
|
export declare function glob(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Promise<string[]>;
|
||||||
|
export declare function glob(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Promise<Path[]>;
|
||||||
|
export declare function glob(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Promise<string[]>;
|
||||||
|
export declare function glob(pattern: string | string[], options: GlobOptions): Promise<Path[] | string[]>;
|
||||||
|
/**
|
||||||
|
* Return an async iterator for walking glob pattern matches.
|
||||||
|
*/
|
||||||
|
export declare function globIterate(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): AsyncGenerator<string, void, void>;
|
||||||
|
export declare function globIterate(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): AsyncGenerator<Path, void, void>;
|
||||||
|
export declare function globIterate(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): AsyncGenerator<string, void, void>;
|
||||||
|
export declare function globIterate(pattern: string | string[], options: GlobOptions): AsyncGenerator<Path, void, void> | AsyncGenerator<string, void, void>;
|
||||||
|
/**
|
||||||
|
* Return a sync iterator for walking glob pattern matches.
|
||||||
|
*/
|
||||||
|
export declare function globIterateSync(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Generator<string, void, void>;
|
||||||
|
export declare function globIterateSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Generator<Path, void, void>;
|
||||||
|
export declare function globIterateSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Generator<string, void, void>;
|
||||||
|
export declare function globIterateSync(pattern: string | string[], options: GlobOptions): Generator<Path, void, void> | Generator<string, void, void>;
|
||||||
|
export { escape, unescape } from 'minimatch';
|
||||||
|
export { Glob } from './glob.js';
|
||||||
|
export type { GlobOptions, GlobOptionsWithFileTypesFalse, GlobOptionsWithFileTypesTrue, GlobOptionsWithFileTypesUnset, } from './glob.js';
|
||||||
|
export { hasMagic } from './has-magic.js';
|
||||||
|
export type { IgnoreLike } from './ignore.js';
|
||||||
|
export type { MatchStream } from './walker.js';
|
||||||
|
declare const _default: typeof glob & {
|
||||||
|
glob: typeof glob;
|
||||||
|
globSync: typeof globSync;
|
||||||
|
globStream: typeof globStream;
|
||||||
|
globStreamSync: typeof globStreamSync;
|
||||||
|
globIterate: typeof globIterate;
|
||||||
|
globIterateSync: typeof globIterateSync;
|
||||||
|
Glob: typeof Glob;
|
||||||
|
hasMagic: (pattern: string | string[], options?: GlobOptions) => boolean;
|
||||||
|
escape: (s: string, { windowsPathsNoEscape, }?: Pick<import("minimatch").MinimatchOptions, "windowsPathsNoEscape"> | undefined) => string;
|
||||||
|
unescape: (s: string, { windowsPathsNoEscape, }?: Pick<import("minimatch").MinimatchOptions, "windowsPathsNoEscape"> | undefined) => string;
|
||||||
|
};
|
||||||
|
export default _default;
|
||||||
|
//# sourceMappingURL=index.d.ts.map
|
||||||
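The overload groups above pivot on the options object: with `withFileTypes: true` the results are path-scurry `Path` objects, otherwise plain strings, and the same matching is exposed as promise, sync, stream, and iterator variants. A short sketch of the four entry points (patterns and logging are illustrative only):

import glob, { globSync, globStream, globIterate } from 'glob';
import type { Path } from 'path-scurry';

async function demo(): Promise<void> {
  // Promise<string[]>: no withFileTypes, so the string overload applies.
  const names: string[] = await glob('src/**/*.ts');

  // Path[]: withFileTypes switches the overload to path-scurry Paths.
  const paths: Path[] = globSync('src/**/*.ts', { withFileTypes: true });

  // Stream: one 'data' event per match, then 'end'.
  globStream('**/*.md').on('data', (m: string) => console.log('stream', m));

  // Async iteration over matches as they are found.
  for await (const m of globIterate('**/*.json', { nodir: true })) {
    console.log('iterate', m);
  }

  console.log(names.length, paths.length);
}

demo();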
1
node_modules/glob/dist/cjs/index.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AACA,OAAO,QAAQ,MAAM,UAAU,CAAA;AAC/B,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,KAAK,EACV,WAAW,EACX,6BAA6B,EAC7B,4BAA4B,EAC5B,6BAA6B,EAC9B,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAGhC;;;;GAIG;AACH,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;AACvB,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAQlD;;;GAGG;AACH,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;AACvB,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAQlD;;GAEG;AACH,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,MAAM,EAAE,CAAA;AACX,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,IAAI,EAAE,CAAA;AACT,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,MAAM,EAAE,CAAA;AACX,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,IAAI,EAAE,GAAG,MAAM,EAAE,CAAA;AAQpB;;;;;GAKG;AACH,wBAAsB,IAAI,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;AACpB,wBAAsB,IAAI,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,OAAO,CAAC,IAAI,EAAE,CAAC,CAAA;AAClB,wBAAsB,IAAI,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;AACpB,wBAAsB,IAAI,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,OAAO,CAAC,IAAI,EAAE,GAAG,MAAM,EAAE,CAAC,CAAA;AAQ7B;;GAEG;AACH,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACrC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACnC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACrC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,GAAG,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAQxE;;GAEG;AACH,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAChC,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAC9B,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAChC,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAS9D,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AAC5C,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAChC,YAAY,EACV,WAAW,EACX,6
BAA6B,EAC7B,4BAA4B,EAC5B,6BAA6B,GAC9B,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AACzC,YAAY,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,YAAY,EAAE,WAAW,EAAE,MAAM,aAAa,CAAA;;;;;;;;;;;;;AAG9C,wBAWE"}
|
||||||
52
node_modules/glob/dist/cjs/index.js
generated
vendored
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.hasMagic = exports.Glob = exports.unescape = exports.escape = exports.globIterateSync = exports.globIterate = exports.glob = exports.globSync = exports.globStream = exports.globStreamSync = void 0;
|
||||||
|
const minimatch_1 = require("minimatch");
|
||||||
|
const glob_js_1 = require("./glob.js");
|
||||||
|
const has_magic_js_1 = require("./has-magic.js");
|
||||||
|
function globStreamSync(pattern, options = {}) {
|
||||||
|
return new glob_js_1.Glob(pattern, options).streamSync();
|
||||||
|
}
|
||||||
|
exports.globStreamSync = globStreamSync;
|
||||||
|
function globStream(pattern, options = {}) {
|
||||||
|
return new glob_js_1.Glob(pattern, options).stream();
|
||||||
|
}
|
||||||
|
exports.globStream = globStream;
|
||||||
|
function globSync(pattern, options = {}) {
|
||||||
|
return new glob_js_1.Glob(pattern, options).walkSync();
|
||||||
|
}
|
||||||
|
exports.globSync = globSync;
|
||||||
|
async function glob(pattern, options = {}) {
|
||||||
|
return new glob_js_1.Glob(pattern, options).walk();
|
||||||
|
}
|
||||||
|
exports.glob = glob;
|
||||||
|
function globIterate(pattern, options = {}) {
|
||||||
|
return new glob_js_1.Glob(pattern, options).iterate();
|
||||||
|
}
|
||||||
|
exports.globIterate = globIterate;
|
||||||
|
function globIterateSync(pattern, options = {}) {
|
||||||
|
return new glob_js_1.Glob(pattern, options).iterateSync();
|
||||||
|
}
|
||||||
|
exports.globIterateSync = globIterateSync;
|
||||||
|
/* c8 ignore start */
|
||||||
|
var minimatch_2 = require("minimatch");
|
||||||
|
Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return minimatch_2.escape; } });
|
||||||
|
Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return minimatch_2.unescape; } });
|
||||||
|
var glob_js_2 = require("./glob.js");
|
||||||
|
Object.defineProperty(exports, "Glob", { enumerable: true, get: function () { return glob_js_2.Glob; } });
|
||||||
|
var has_magic_js_2 = require("./has-magic.js");
|
||||||
|
Object.defineProperty(exports, "hasMagic", { enumerable: true, get: function () { return has_magic_js_2.hasMagic; } });
|
||||||
|
/* c8 ignore stop */
|
||||||
|
exports.default = Object.assign(glob, {
|
||||||
|
glob,
|
||||||
|
globSync,
|
||||||
|
globStream,
|
||||||
|
globStreamSync,
|
||||||
|
globIterate,
|
||||||
|
globIterateSync,
|
||||||
|
Glob: glob_js_1.Glob,
|
||||||
|
hasMagic: has_magic_js_1.hasMagic,
|
||||||
|
escape: minimatch_1.escape,
|
||||||
|
unescape: minimatch_1.unescape,
|
||||||
|
});
|
||||||
|
//# sourceMappingURL=index.js.map
|
||||||
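As the compiled wrappers above show, every top-level helper simply constructs a `Glob` and calls the corresponding walk method, so the functional and class forms are interchangeable (pattern and options below are placeholders):

import { glob, Glob } from 'glob';

(async () => {
  // glob(pattern, opts) is exactly new Glob(pattern, opts).walk()
  const viaHelper = await glob('lib/**/*.js', { nodir: true });
  const viaClass = await new Glob('lib/**/*.js', { nodir: true }).walk();
  console.log(viaHelper.length === viaClass.length);
})();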
1
node_modules/glob/dist/cjs/index.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;AAAA,yCAA4C;AAS5C,uCAAgC;AAChC,iDAAyC;AAuBzC,SAAgB,cAAc,CAC5B,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,UAAU,EAAE,CAAA;AAChD,CAAC;AALD,wCAKC;AAsBD,SAAgB,UAAU,CACxB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;AAC5C,CAAC;AALD,gCAKC;AAqBD,SAAgB,QAAQ,CACtB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,QAAQ,EAAE,CAAA;AAC9C,CAAC;AALD,4BAKC;AAwBM,KAAK,UAAU,IAAI,CACxB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAA;AAC1C,CAAC;AALD,oBAKC;AAqBD,SAAgB,WAAW,CACzB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,OAAO,EAAE,CAAA;AAC7C,CAAC;AALD,kCAKC;AAqBD,SAAgB,eAAe,CAC7B,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,WAAW,EAAE,CAAA;AACjD,CAAC;AALD,0CAKC;AAED,qBAAqB;AACrB,uCAA4C;AAAnC,mGAAA,MAAM,OAAA;AAAE,qGAAA,QAAQ,OAAA;AACzB,qCAAgC;AAAvB,+FAAA,IAAI,OAAA;AAOb,+CAAyC;AAAhC,wGAAA,QAAQ,OAAA;AAGjB,oBAAoB;AAEpB,kBAAe,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE;IACjC,IAAI;IACJ,QAAQ;IACR,UAAU;IACV,cAAc;IACd,WAAW;IACX,eAAe;IACf,IAAI,EAAJ,cAAI;IACJ,QAAQ,EAAR,uBAAQ;IACR,MAAM,EAAN,kBAAM;IACN,QAAQ,EAAR,oBAAQ;CACT,CAAC,CAAA"}
|
||||||
3
node_modules/glob/dist/cjs/package.json
generated
vendored
Normal file
@@ -0,0 +1,3 @@
{
  "type": "commonjs"
}
77
node_modules/glob/dist/cjs/pattern.d.ts
generated
vendored
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
/// <reference types="node" />
|
||||||
|
import { GLOBSTAR } from 'minimatch';
|
||||||
|
export type MMPattern = string | RegExp | typeof GLOBSTAR;
|
||||||
|
export type PatternList = [p: MMPattern, ...rest: MMPattern[]];
|
||||||
|
export type UNCPatternList = [
|
||||||
|
p0: '',
|
||||||
|
p1: '',
|
||||||
|
p2: string,
|
||||||
|
p3: string,
|
||||||
|
...rest: MMPattern[]
|
||||||
|
];
|
||||||
|
export type DrivePatternList = [p0: string, ...rest: MMPattern[]];
|
||||||
|
export type AbsolutePatternList = [p0: '', ...rest: MMPattern[]];
|
||||||
|
export type GlobList = [p: string, ...rest: string[]];
|
||||||
|
/**
|
||||||
|
* An immutable-ish view on an array of glob parts and their parsed
|
||||||
|
* results
|
||||||
|
*/
|
||||||
|
export declare class Pattern {
|
||||||
|
#private;
|
||||||
|
readonly length: number;
|
||||||
|
constructor(patternList: MMPattern[], globList: string[], index: number, platform: NodeJS.Platform);
|
||||||
|
/**
|
||||||
|
* The first entry in the parsed list of patterns
|
||||||
|
*/
|
||||||
|
pattern(): MMPattern;
|
||||||
|
/**
|
||||||
|
* true if pattern() returns a string
|
||||||
|
*/
|
||||||
|
isString(): boolean;
|
||||||
|
/**
|
||||||
|
* true if pattern() returns GLOBSTAR
|
||||||
|
*/
|
||||||
|
isGlobstar(): boolean;
|
||||||
|
/**
|
||||||
|
* true if pattern() returns a regexp
|
||||||
|
*/
|
||||||
|
isRegExp(): boolean;
|
||||||
|
/**
|
||||||
|
* The /-joined set of glob parts that make up this pattern
|
||||||
|
*/
|
||||||
|
globString(): string;
|
||||||
|
/**
|
||||||
|
* true if there are more pattern parts after this one
|
||||||
|
*/
|
||||||
|
hasMore(): boolean;
|
||||||
|
/**
|
||||||
|
* The rest of the pattern after this part, or null if this is the end
|
||||||
|
*/
|
||||||
|
rest(): Pattern | null;
|
||||||
|
/**
|
||||||
|
* true if the pattern represents a //unc/path/ on windows
|
||||||
|
*/
|
||||||
|
isUNC(): boolean;
|
||||||
|
/**
|
||||||
|
* True if the pattern starts with a drive letter on Windows
|
||||||
|
*/
|
||||||
|
isDrive(): boolean;
|
||||||
|
/**
|
||||||
|
* True if the pattern is rooted on an absolute path
|
||||||
|
*/
|
||||||
|
isAbsolute(): boolean;
|
||||||
|
/**
|
||||||
|
* consume the root of the pattern, and return it
|
||||||
|
*/
|
||||||
|
root(): string;
|
||||||
|
/**
|
||||||
|
* Check to see if the current globstar pattern is allowed to follow
|
||||||
|
* a symbolic link.
|
||||||
|
*/
|
||||||
|
checkFollowGlobstar(): boolean;
|
||||||
|
/**
|
||||||
|
* Mark that the current globstar pattern is following a symbolic link
|
||||||
|
*/
|
||||||
|
markFollowGlobstar(): boolean;
|
||||||
|
}
|
||||||
|
//# sourceMappingURL=pattern.d.ts.map
|
||||||
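`Pattern` is an internal view over minimatch's parsed output: one row of `Minimatch#set` paired with the matching `Minimatch#globParts` row, plus an index into that row. The sketch below builds one by hand just to show the segment walk; the minimatch properties used and the deep `dist/cjs` import path are assumptions drawn from how the surrounding code consumes them, not documented API:

import { Minimatch } from 'minimatch';
// NOTE: internal module path, shown only to poke at the class above.
import { Pattern } from 'glob/dist/cjs/pattern.js';

// Parse a glob into per-segment form, then wrap the first expansion.
const mm = new Minimatch('src/**/*.ts');
const head = new Pattern(mm.set[0], mm.globParts[0], 0, process.platform);

// Walk the segments: 'src' (string), GLOBSTAR, then a RegExp for '*.ts'.
for (let cur: Pattern | null = head; cur; cur = cur.rest()) {
  console.log(cur.isString(), cur.isGlobstar(), cur.isRegExp(), cur.globString());
}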
1
node_modules/glob/dist/cjs/pattern.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"pattern.d.ts","sourceRoot":"","sources":["../../src/pattern.ts"],"names":[],"mappings":";AAEA,OAAO,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AACpC,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,QAAQ,CAAA;AAGzD,MAAM,MAAM,WAAW,GAAG,CAAC,CAAC,EAAE,SAAS,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AAC9D,MAAM,MAAM,cAAc,GAAG;IAC3B,EAAE,EAAE,EAAE;IACN,EAAE,EAAE,EAAE;IACN,EAAE,EAAE,MAAM;IACV,EAAE,EAAE,MAAM;IACV,GAAG,IAAI,EAAE,SAAS,EAAE;CACrB,CAAA;AACD,MAAM,MAAM,gBAAgB,GAAG,CAAC,EAAE,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AACjE,MAAM,MAAM,mBAAmB,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AAChE,MAAM,MAAM,QAAQ,GAAG,CAAC,CAAC,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,MAAM,EAAE,CAAC,CAAA;AAMrD;;;GAGG;AACH,qBAAa,OAAO;;IAIlB,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAA;gBAUrB,WAAW,EAAE,SAAS,EAAE,EACxB,QAAQ,EAAE,MAAM,EAAE,EAClB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,CAAC,QAAQ;IA6D3B;;OAEG;IACH,OAAO,IAAI,SAAS;IAIpB;;OAEG;IACH,QAAQ,IAAI,OAAO;IAGnB;;OAEG;IACH,UAAU,IAAI,OAAO;IAGrB;;OAEG;IACH,QAAQ,IAAI,OAAO;IAInB;;OAEG;IACH,UAAU,IAAI,MAAM;IAUpB;;OAEG;IACH,OAAO,IAAI,OAAO;IAIlB;;OAEG;IACH,IAAI,IAAI,OAAO,GAAG,IAAI;IAetB;;OAEG;IACH,KAAK,IAAI,OAAO;IAoBhB;;OAEG;IACH,OAAO,IAAI,OAAO;IAelB;;OAEG;IACH,UAAU,IAAI,OAAO;IAUrB;;OAEG;IACH,IAAI,IAAI,MAAM;IAOd;;;OAGG;IACH,mBAAmB,IAAI,OAAO;IAQ9B;;OAEG;IACH,kBAAkB,IAAI,OAAO;CAM9B"}
|
||||||
219
node_modules/glob/dist/cjs/pattern.js
generated
vendored
Normal file
@@ -0,0 +1,219 @@
|
|||||||
|
"use strict";
|
||||||
|
// this is just a very light wrapper around 2 arrays with an offset index
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.Pattern = void 0;
|
||||||
|
const minimatch_1 = require("minimatch");
|
||||||
|
const isPatternList = (pl) => pl.length >= 1;
|
||||||
|
const isGlobList = (gl) => gl.length >= 1;
|
||||||
|
/**
|
||||||
|
* An immutable-ish view on an array of glob parts and their parsed
|
||||||
|
* results
|
||||||
|
*/
|
||||||
|
class Pattern {
|
||||||
|
#patternList;
|
||||||
|
#globList;
|
||||||
|
#index;
|
||||||
|
length;
|
||||||
|
#platform;
|
||||||
|
#rest;
|
||||||
|
#globString;
|
||||||
|
#isDrive;
|
||||||
|
#isUNC;
|
||||||
|
#isAbsolute;
|
||||||
|
#followGlobstar = true;
|
||||||
|
constructor(patternList, globList, index, platform) {
|
||||||
|
if (!isPatternList(patternList)) {
|
||||||
|
throw new TypeError('empty pattern list');
|
||||||
|
}
|
||||||
|
if (!isGlobList(globList)) {
|
||||||
|
throw new TypeError('empty glob list');
|
||||||
|
}
|
||||||
|
if (globList.length !== patternList.length) {
|
||||||
|
throw new TypeError('mismatched pattern list and glob list lengths');
|
||||||
|
}
|
||||||
|
this.length = patternList.length;
|
||||||
|
if (index < 0 || index >= this.length) {
|
||||||
|
throw new TypeError('index out of range');
|
||||||
|
}
|
||||||
|
this.#patternList = patternList;
|
||||||
|
this.#globList = globList;
|
||||||
|
this.#index = index;
|
||||||
|
this.#platform = platform;
|
||||||
|
// normalize root entries of absolute patterns on initial creation.
|
||||||
|
if (this.#index === 0) {
|
||||||
|
// c: => ['c:/']
|
||||||
|
// C:/ => ['C:/']
|
||||||
|
// C:/x => ['C:/', 'x']
|
||||||
|
// //host/share => ['//host/share/']
|
||||||
|
// //host/share/ => ['//host/share/']
|
||||||
|
// //host/share/x => ['//host/share/', 'x']
|
||||||
|
// /etc => ['/', 'etc']
|
||||||
|
// / => ['/']
|
||||||
|
if (this.isUNC()) {
|
||||||
|
// '' / '' / 'host' / 'share'
|
||||||
|
const [p0, p1, p2, p3, ...prest] = this.#patternList;
|
||||||
|
const [g0, g1, g2, g3, ...grest] = this.#globList;
|
||||||
|
if (prest[0] === '') {
|
||||||
|
// ends in /
|
||||||
|
prest.shift();
|
||||||
|
grest.shift();
|
||||||
|
}
|
||||||
|
const p = [p0, p1, p2, p3, ''].join('/');
|
||||||
|
const g = [g0, g1, g2, g3, ''].join('/');
|
||||||
|
this.#patternList = [p, ...prest];
|
||||||
|
this.#globList = [g, ...grest];
|
||||||
|
this.length = this.#patternList.length;
|
||||||
|
}
|
||||||
|
else if (this.isDrive() || this.isAbsolute()) {
|
||||||
|
const [p1, ...prest] = this.#patternList;
|
||||||
|
const [g1, ...grest] = this.#globList;
|
||||||
|
if (prest[0] === '') {
|
||||||
|
// ends in /
|
||||||
|
prest.shift();
|
||||||
|
grest.shift();
|
||||||
|
}
|
||||||
|
const p = p1 + '/';
|
||||||
|
const g = g1 + '/';
|
||||||
|
this.#patternList = [p, ...prest];
|
||||||
|
this.#globList = [g, ...grest];
|
||||||
|
this.length = this.#patternList.length;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* The first entry in the parsed list of patterns
|
||||||
|
*/
|
||||||
|
pattern() {
|
||||||
|
return this.#patternList[this.#index];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* true if pattern() returns a string
|
||||||
|
*/
|
||||||
|
isString() {
|
||||||
|
return typeof this.#patternList[this.#index] === 'string';
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* true if pattern() returns GLOBSTAR
|
||||||
|
*/
|
||||||
|
isGlobstar() {
|
||||||
|
return this.#patternList[this.#index] === minimatch_1.GLOBSTAR;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* true if pattern() returns a regexp
|
||||||
|
*/
|
||||||
|
isRegExp() {
|
||||||
|
return this.#patternList[this.#index] instanceof RegExp;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* The /-joined set of glob parts that make up this pattern
|
||||||
|
*/
|
||||||
|
globString() {
|
||||||
|
return (this.#globString =
|
||||||
|
this.#globString ||
|
||||||
|
(this.#index === 0
|
||||||
|
? this.isAbsolute()
|
||||||
|
? this.#globList[0] + this.#globList.slice(1).join('/')
|
||||||
|
: this.#globList.join('/')
|
||||||
|
: this.#globList.slice(this.#index).join('/')));
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* true if there are more pattern parts after this one
|
||||||
|
*/
|
||||||
|
hasMore() {
|
||||||
|
return this.length > this.#index + 1;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* The rest of the pattern after this part, or null if this is the end
|
||||||
|
*/
|
||||||
|
rest() {
|
||||||
|
if (this.#rest !== undefined)
|
||||||
|
return this.#rest;
|
||||||
|
if (!this.hasMore())
|
||||||
|
return (this.#rest = null);
|
||||||
|
this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform);
|
||||||
|
this.#rest.#isAbsolute = this.#isAbsolute;
|
||||||
|
this.#rest.#isUNC = this.#isUNC;
|
||||||
|
this.#rest.#isDrive = this.#isDrive;
|
||||||
|
return this.#rest;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* true if the pattern represents a //unc/path/ on windows
|
||||||
|
*/
|
||||||
|
isUNC() {
|
||||||
|
const pl = this.#patternList;
|
||||||
|
return this.#isUNC !== undefined
|
||||||
|
? this.#isUNC
|
||||||
|
: (this.#isUNC =
|
||||||
|
this.#platform === 'win32' &&
|
||||||
|
this.#index === 0 &&
|
||||||
|
pl[0] === '' &&
|
||||||
|
pl[1] === '' &&
|
||||||
|
typeof pl[2] === 'string' &&
|
||||||
|
!!pl[2] &&
|
||||||
|
typeof pl[3] === 'string' &&
|
||||||
|
!!pl[3]);
|
||||||
|
}
|
||||||
|
// pattern like C:/...
|
||||||
|
// split = ['C:', ...]
|
||||||
|
// XXX: would be nice to handle patterns like `c:*` to test the cwd
|
||||||
|
// in c: for *, but I don't know of a way to even figure out what that
|
||||||
|
// cwd is without actually chdir'ing into it?
|
||||||
|
/**
|
||||||
|
* True if the pattern starts with a drive letter on Windows
|
||||||
|
*/
|
||||||
|
isDrive() {
|
||||||
|
const pl = this.#patternList;
|
||||||
|
return this.#isDrive !== undefined
|
||||||
|
? this.#isDrive
|
||||||
|
: (this.#isDrive =
|
||||||
|
this.#platform === 'win32' &&
|
||||||
|
this.#index === 0 &&
|
||||||
|
this.length > 1 &&
|
||||||
|
typeof pl[0] === 'string' &&
|
||||||
|
/^[a-z]:$/i.test(pl[0]));
|
||||||
|
}
|
||||||
|
// pattern = '/' or '/...' or '/x/...'
|
||||||
|
// split = ['', ''] or ['', ...] or ['', 'x', ...]
|
||||||
|
// Drive and UNC both considered absolute on windows
|
||||||
|
/**
|
||||||
|
* True if the pattern is rooted on an absolute path
|
||||||
|
*/
|
||||||
|
isAbsolute() {
|
||||||
|
const pl = this.#patternList;
|
||||||
|
return this.#isAbsolute !== undefined
|
||||||
|
? this.#isAbsolute
|
||||||
|
: (this.#isAbsolute =
|
||||||
|
(pl[0] === '' && pl.length > 1) ||
|
||||||
|
this.isDrive() ||
|
||||||
|
this.isUNC());
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* consume the root of the pattern, and return it
|
||||||
|
*/
|
||||||
|
root() {
|
||||||
|
const p = this.#patternList[0];
|
||||||
|
return typeof p === 'string' && this.isAbsolute() && this.#index === 0
|
||||||
|
? p
|
||||||
|
: '';
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Check to see if the current globstar pattern is allowed to follow
|
||||||
|
* a symbolic link.
|
||||||
|
*/
|
||||||
|
checkFollowGlobstar() {
|
||||||
|
return !(this.#index === 0 ||
|
||||||
|
!this.isGlobstar() ||
|
||||||
|
!this.#followGlobstar);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Mark that the current globstar pattern is following a symbolic link
|
||||||
|
*/
|
||||||
|
markFollowGlobstar() {
|
||||||
|
if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)
|
||||||
|
return false;
|
||||||
|
this.#followGlobstar = false;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.Pattern = Pattern;
|
||||||
|
//# sourceMappingURL=pattern.js.map
|
||||||
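The three root checks near the end of the class body are what drive the constructor's normalization table in the comments above. Restated standalone below, with the `platform === 'win32'` and `index === 0` guards and the private-field caching stripped for brevity; the helper names and sample part lists are illustrative:

// Standalone restatement of Pattern's isUNC / isDrive / isAbsolute checks.
type Part = string | RegExp | symbol;

const isUNC = (pl: Part[]): boolean =>
  pl[0] === '' && pl[1] === '' &&
  typeof pl[2] === 'string' && !!pl[2] &&
  typeof pl[3] === 'string' && !!pl[3];

const isDrive = (pl: Part[]): boolean =>
  pl.length > 1 && typeof pl[0] === 'string' && /^[a-z]:$/i.test(pl[0]);

const isAbsolute = (pl: Part[]): boolean =>
  (pl[0] === '' && pl.length > 1) || isDrive(pl) || isUNC(pl);

console.log(isUNC(['', '', 'host', 'share', 'x'])); // //host/share/x -> true
console.log(isDrive(['c:', 'foo']));                // c:/foo         -> true
console.log(isAbsolute(['', 'etc']));               // /etc           -> true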
1
node_modules/glob/dist/cjs/pattern.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
59
node_modules/glob/dist/cjs/processor.d.ts
generated
vendored
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
import { MMRegExp } from 'minimatch';
|
||||||
|
import { Path } from 'path-scurry';
|
||||||
|
import { Pattern } from './pattern.js';
|
||||||
|
import { GlobWalkerOpts } from './walker.js';
|
||||||
|
/**
|
||||||
|
* A cache of which patterns have been processed for a given Path
|
||||||
|
*/
|
||||||
|
export declare class HasWalkedCache {
|
||||||
|
store: Map<string, Set<string>>;
|
||||||
|
constructor(store?: Map<string, Set<string>>);
|
||||||
|
copy(): HasWalkedCache;
|
||||||
|
hasWalked(target: Path, pattern: Pattern): boolean | undefined;
|
||||||
|
storeWalked(target: Path, pattern: Pattern): void;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A record of which paths have been matched in a given walk step,
|
||||||
|
* and whether they only are considered a match if they are a directory,
|
||||||
|
* and whether their absolute or relative path should be returned.
|
||||||
|
*/
|
||||||
|
export declare class MatchRecord {
|
||||||
|
store: Map<Path, number>;
|
||||||
|
add(target: Path, absolute: boolean, ifDir: boolean): void;
|
||||||
|
entries(): [Path, boolean, boolean][];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A collection of patterns that must be processed in a subsequent step
|
||||||
|
* for a given path.
|
||||||
|
*/
|
||||||
|
export declare class SubWalks {
|
||||||
|
store: Map<Path, Pattern[]>;
|
||||||
|
add(target: Path, pattern: Pattern): void;
|
||||||
|
get(target: Path): Pattern[];
|
||||||
|
entries(): [Path, Pattern[]][];
|
||||||
|
keys(): Path[];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* The class that processes patterns for a given path.
|
||||||
|
*
|
||||||
|
* Handles child entry filtering, and determining whether a path's
|
||||||
|
* directory contents must be read.
|
||||||
|
*/
|
||||||
|
export declare class Processor {
|
||||||
|
hasWalkedCache: HasWalkedCache;
|
||||||
|
matches: MatchRecord;
|
||||||
|
subwalks: SubWalks;
|
||||||
|
patterns?: Pattern[];
|
||||||
|
follow: boolean;
|
||||||
|
dot: boolean;
|
||||||
|
opts: GlobWalkerOpts;
|
||||||
|
constructor(opts: GlobWalkerOpts, hasWalkedCache?: HasWalkedCache);
|
||||||
|
processPatterns(target: Path, patterns: Pattern[]): this;
|
||||||
|
subwalkTargets(): Path[];
|
||||||
|
child(): Processor;
|
||||||
|
filterEntries(parent: Path, entries: Path[]): Processor;
|
||||||
|
testGlobstar(e: Path, pattern: Pattern, rest: Pattern | null, absolute: boolean): void;
|
||||||
|
testRegExp(e: Path, p: MMRegExp, rest: Pattern | null, absolute: boolean): void;
|
||||||
|
testString(e: Path, p: string, rest: Pattern | null, absolute: boolean): void;
|
||||||
|
}
|
||||||
|
//# sourceMappingURL=processor.d.ts.map
|
||||||
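`MatchRecord` packs the two per-match flags into a small bit field: bit 2 for "return the absolute path" and bit 1 for "only a match if it is a directory", and repeat additions are combined with `&`. A standalone sketch of that encoding; the `FakePath` stand-in replaces the real path-scurry `Path` purely for illustration:

// Minimal restatement of MatchRecord's flag encoding.
type FakePath = { name: string };

class MiniMatchRecord {
  private store = new Map<FakePath, number>();

  add(target: FakePath, absolute: boolean, ifDir: boolean): void {
    const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0);
    const current = this.store.get(target);
    // Re-adding ANDs the flags: a later non-ifDir match clears the ifDir bit.
    this.store.set(target, current === undefined ? n : n & current);
  }

  entries(): [FakePath, boolean, boolean][] {
    return [...this.store.entries()].map(([p, n]) => [p, !!(n & 2), !!(n & 1)]);
  }
}

const rec = new MiniMatchRecord();
const p: FakePath = { name: 'src' };
rec.add(p, true, true);   // absolute, dir-only
rec.add(p, true, false);  // a second pattern matches it as a plain entry
console.log(rec.entries()); // [[{ name: 'src' }, true, false]]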
1
node_modules/glob/dist/cjs/processor.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"processor.d.ts","sourceRoot":"","sources":["../../src/processor.ts"],"names":[],"mappings":"AAEA,OAAO,EAAY,QAAQ,EAAE,MAAM,WAAW,CAAA;AAC9C,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,EAAa,OAAO,EAAE,MAAM,cAAc,CAAA;AACjD,OAAO,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;AAE5C;;GAEG;AACH,qBAAa,cAAc;IACzB,KAAK,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC,CAAA;gBACnB,KAAK,GAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,CAAa;IAGvD,IAAI;IAGJ,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;IAGxC,WAAW,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;CAM3C;AAED;;;;GAIG;AACH,qBAAa,WAAW;IACtB,KAAK,EAAE,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAY;IACpC,GAAG,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO;IAMnD,OAAO,IAAI,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE;CAOtC;AAED;;;GAGG;AACH,qBAAa,QAAQ;IACnB,KAAK,EAAE,GAAG,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,CAAY;IACvC,GAAG,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;IAWlC,GAAG,CAAC,MAAM,EAAE,IAAI,GAAG,OAAO,EAAE;IAS5B,OAAO,IAAI,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE;IAG9B,IAAI,IAAI,IAAI,EAAE;CAGf;AAED;;;;;GAKG;AACH,qBAAa,SAAS;IACpB,cAAc,EAAE,cAAc,CAAA;IAC9B,OAAO,cAAoB;IAC3B,QAAQ,WAAiB;IACzB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,GAAG,EAAE,OAAO,CAAA;IACZ,IAAI,EAAE,cAAc,CAAA;gBAER,IAAI,EAAE,cAAc,EAAE,cAAc,CAAC,EAAE,cAAc;IASjE,eAAe,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE;IAsGjD,cAAc,IAAI,IAAI,EAAE;IAIxB,KAAK;IAQL,aAAa,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,GAAG,SAAS;IAqBvD,YAAY,CACV,CAAC,EAAE,IAAI,EACP,OAAO,EAAE,OAAO,EAChB,IAAI,EAAE,OAAO,GAAG,IAAI,EACpB,QAAQ,EAAE,OAAO;IA8CnB,UAAU,CACR,CAAC,EAAE,IAAI,EACP,CAAC,EAAE,QAAQ,EACX,IAAI,EAAE,OAAO,GAAG,IAAI,EACpB,QAAQ,EAAE,OAAO;IAUnB,UAAU,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,GAAG,IAAI,EAAE,QAAQ,EAAE,OAAO;CASvE"}
|
||||||
307
node_modules/glob/dist/cjs/processor.js
generated
vendored
Normal file
@@ -0,0 +1,307 @@
|
|||||||
|
"use strict";
|
||||||
|
// synchronous utility for filtering entries and calculating subwalks
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.Processor = exports.SubWalks = exports.MatchRecord = exports.HasWalkedCache = void 0;
|
||||||
|
const minimatch_1 = require("minimatch");
|
||||||
|
/**
|
||||||
|
* A cache of which patterns have been processed for a given Path
|
||||||
|
*/
|
||||||
|
class HasWalkedCache {
|
||||||
|
store;
|
||||||
|
constructor(store = new Map()) {
|
||||||
|
this.store = store;
|
||||||
|
}
|
||||||
|
copy() {
|
||||||
|
return new HasWalkedCache(new Map(this.store));
|
||||||
|
}
|
||||||
|
hasWalked(target, pattern) {
|
||||||
|
return this.store.get(target.fullpath())?.has(pattern.globString());
|
||||||
|
}
|
||||||
|
storeWalked(target, pattern) {
|
||||||
|
const fullpath = target.fullpath();
|
||||||
|
const cached = this.store.get(fullpath);
|
||||||
|
if (cached)
|
||||||
|
cached.add(pattern.globString());
|
||||||
|
else
|
||||||
|
this.store.set(fullpath, new Set([pattern.globString()]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.HasWalkedCache = HasWalkedCache;
|
||||||
|
/**
|
||||||
|
* A record of which paths have been matched in a given walk step,
|
||||||
|
* and whether they only are considered a match if they are a directory,
|
||||||
|
* and whether their absolute or relative path should be returned.
|
||||||
|
*/
|
||||||
|
class MatchRecord {
|
||||||
|
store = new Map();
|
||||||
|
add(target, absolute, ifDir) {
|
||||||
|
const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0);
|
||||||
|
const current = this.store.get(target);
|
||||||
|
this.store.set(target, current === undefined ? n : n & current);
|
||||||
|
}
|
||||||
|
// match, absolute, ifdir
|
||||||
|
entries() {
|
||||||
|
return [...this.store.entries()].map(([path, n]) => [
|
||||||
|
path,
|
||||||
|
!!(n & 2),
|
||||||
|
!!(n & 1),
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.MatchRecord = MatchRecord;
|
||||||
|
/**
|
||||||
|
* A collection of patterns that must be processed in a subsequent step
|
||||||
|
* for a given path.
|
||||||
|
*/
|
||||||
|
class SubWalks {
|
||||||
|
store = new Map();
|
||||||
|
add(target, pattern) {
|
||||||
|
if (!target.canReaddir()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const subs = this.store.get(target);
|
||||||
|
if (subs) {
|
||||||
|
if (!subs.find(p => p.globString() === pattern.globString())) {
|
||||||
|
subs.push(pattern);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else
|
||||||
|
this.store.set(target, [pattern]);
|
||||||
|
}
|
||||||
|
get(target) {
|
||||||
|
const subs = this.store.get(target);
|
||||||
|
/* c8 ignore start */
|
||||||
|
if (!subs) {
|
||||||
|
throw new Error('attempting to walk unknown path');
|
||||||
|
}
|
||||||
|
/* c8 ignore stop */
|
||||||
|
return subs;
|
||||||
|
}
|
||||||
|
entries() {
|
||||||
|
return this.keys().map(k => [k, this.store.get(k)]);
|
||||||
|
}
|
||||||
|
keys() {
|
||||||
|
return [...this.store.keys()].filter(t => t.canReaddir());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.SubWalks = SubWalks;
|
||||||
|
/**
|
||||||
|
* The class that processes patterns for a given path.
|
||||||
|
*
|
||||||
|
* Handles child entry filtering, and determining whether a path's
|
||||||
|
* directory contents must be read.
|
||||||
|
*/
|
||||||
|
class Processor {
|
||||||
|
hasWalkedCache;
|
||||||
|
matches = new MatchRecord();
|
||||||
|
subwalks = new SubWalks();
|
||||||
|
patterns;
|
||||||
|
follow;
|
||||||
|
dot;
|
||||||
|
opts;
|
||||||
|
constructor(opts, hasWalkedCache) {
|
||||||
|
this.opts = opts;
|
||||||
|
this.follow = !!opts.follow;
|
||||||
|
this.dot = !!opts.dot;
|
||||||
|
this.hasWalkedCache = hasWalkedCache
|
||||||
|
? hasWalkedCache.copy()
|
||||||
|
: new HasWalkedCache();
|
||||||
|
}
|
||||||
|
processPatterns(target, patterns) {
|
||||||
|
this.patterns = patterns;
|
||||||
|
const processingSet = patterns.map(p => [target, p]);
|
||||||
|
// map of paths to the magic-starting subwalks they need to walk
|
||||||
|
// first item in patterns is the filter
|
||||||
|
for (let [t, pattern] of processingSet) {
|
||||||
|
this.hasWalkedCache.storeWalked(t, pattern);
|
||||||
|
const root = pattern.root();
|
||||||
|
const absolute = pattern.isAbsolute() && this.opts.absolute !== false;
|
||||||
|
// start absolute patterns at root
|
||||||
|
if (root) {
|
||||||
|
t = t.resolve(root === '/' && this.opts.root !== undefined
|
||||||
|
? this.opts.root
|
||||||
|
: root);
|
||||||
|
const rest = pattern.rest();
|
||||||
|
if (!rest) {
|
||||||
|
this.matches.add(t, true, false);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
pattern = rest;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let p;
|
||||||
|
let rest;
|
||||||
|
let changed = false;
|
||||||
|
while (typeof (p = pattern.pattern()) === 'string' &&
|
||||||
|
(rest = pattern.rest())) {
|
||||||
|
const c = t.resolve(p);
|
||||||
|
// we can be reasonably sure that .. is a readable dir
|
||||||
|
if (c.isUnknown() && p !== '..')
|
||||||
|
break;
|
||||||
|
t = c;
|
||||||
|
pattern = rest;
|
||||||
|
changed = true;
|
||||||
|
}
|
||||||
|
p = pattern.pattern();
|
||||||
|
rest = pattern.rest();
|
||||||
|
if (changed) {
|
||||||
|
if (this.hasWalkedCache.hasWalked(t, pattern))
|
||||||
|
continue;
|
||||||
|
this.hasWalkedCache.storeWalked(t, pattern);
|
||||||
|
}
|
||||||
|
// now we have either a final string for a known entry,
|
||||||
|
// more strings for an unknown entry,
|
||||||
|
// or a pattern starting with magic, mounted on t.
|
||||||
|
if (typeof p === 'string') {
|
||||||
|
// must be final entry
|
||||||
|
if (!rest) {
|
||||||
|
const ifDir = p === '..' || p === '' || p === '.';
|
||||||
|
this.matches.add(t.resolve(p), absolute, ifDir);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this.subwalks.add(t, pattern);
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
else if (p === minimatch_1.GLOBSTAR) {
|
||||||
|
// if no rest, match and subwalk pattern
|
||||||
|
// if rest, process rest and subwalk pattern
|
||||||
|
// if it's a symlink, but we didn't get here by way of a
|
||||||
|
// globstar match (meaning it's the first time THIS globstar
|
||||||
|
// has traversed a symlink), then we follow it. Otherwise, stop.
|
||||||
|
if (!t.isSymbolicLink() ||
|
||||||
|
this.follow ||
|
||||||
|
pattern.checkFollowGlobstar()) {
|
||||||
|
this.subwalks.add(t, pattern);
|
||||||
|
}
|
||||||
|
const rp = rest?.pattern();
|
||||||
|
const rrest = rest?.rest();
|
||||||
|
if (!rest || ((rp === '' || rp === '.') && !rrest)) {
|
||||||
|
// only HAS to be a dir if it ends in **/ or **/.
|
||||||
|
// but ending in ** will match files as well.
|
||||||
|
this.matches.add(t, absolute, rp === '' || rp === '.');
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
if (rp === '..') {
|
||||||
|
// this would mean you're matching **/.. at the fs root,
|
||||||
|
// and no thanks, I'm not gonna test that specific case.
|
||||||
|
/* c8 ignore start */
|
||||||
|
const tp = t.parent || t;
|
||||||
|
/* c8 ignore stop */
|
||||||
|
if (!rrest)
|
||||||
|
this.matches.add(tp, absolute, true);
|
||||||
|
else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {
|
||||||
|
this.subwalks.add(tp, rrest);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (p instanceof RegExp) {
|
||||||
|
this.subwalks.add(t, pattern);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
subwalkTargets() {
|
||||||
|
return this.subwalks.keys();
|
||||||
|
}
|
||||||
|
child() {
|
||||||
|
return new Processor(this.opts, this.hasWalkedCache);
|
||||||
|
}
|
||||||
|
// return a new Processor containing the subwalks for each
|
||||||
|
// child entry, and a set of matches, and
|
||||||
|
// a hasWalkedCache that's a copy of this one
|
||||||
|
// then we're going to call
|
||||||
|
filterEntries(parent, entries) {
|
||||||
|
const patterns = this.subwalks.get(parent);
|
||||||
|
// put matches and entry walks into the results processor
|
||||||
|
const results = this.child();
|
||||||
|
for (const e of entries) {
|
||||||
|
for (const pattern of patterns) {
|
||||||
|
const absolute = pattern.isAbsolute();
|
||||||
|
const p = pattern.pattern();
|
||||||
|
const rest = pattern.rest();
|
||||||
|
if (p === minimatch_1.GLOBSTAR) {
|
||||||
|
results.testGlobstar(e, pattern, rest, absolute);
|
||||||
|
}
|
||||||
|
else if (p instanceof RegExp) {
|
||||||
|
results.testRegExp(e, p, rest, absolute);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
results.testString(e, p, rest, absolute);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
testGlobstar(e, pattern, rest, absolute) {
|
||||||
|
if (this.dot || !e.name.startsWith('.')) {
|
||||||
|
if (!pattern.hasMore()) {
|
||||||
|
this.matches.add(e, absolute, false);
|
||||||
|
}
|
||||||
|
if (e.canReaddir()) {
|
||||||
|
// if we're in follow mode or it's not a symlink, just keep
|
||||||
|
// testing the same pattern. If there's more after the globstar,
|
||||||
|
// then this symlink consumes the globstar. If not, then we can
|
||||||
|
// follow at most ONE symlink along the way, so we mark it, which
|
||||||
|
// also checks to ensure that it wasn't already marked.
|
||||||
|
if (this.follow || !e.isSymbolicLink()) {
|
||||||
|
this.subwalks.add(e, pattern);
|
||||||
|
}
|
||||||
|
else if (e.isSymbolicLink()) {
|
||||||
|
if (rest && pattern.checkFollowGlobstar()) {
|
||||||
|
this.subwalks.add(e, rest);
|
||||||
|
}
|
||||||
|
else if (pattern.markFollowGlobstar()) {
|
||||||
|
this.subwalks.add(e, pattern);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// if the NEXT thing matches this entry, then also add
|
||||||
|
// the rest.
|
||||||
|
if (rest) {
|
||||||
|
const rp = rest.pattern();
|
||||||
|
if (typeof rp === 'string' &&
|
||||||
|
// dots and empty were handled already
|
||||||
|
rp !== '..' &&
|
||||||
|
rp !== '' &&
|
||||||
|
rp !== '.') {
|
||||||
|
this.testString(e, rp, rest.rest(), absolute);
|
||||||
|
}
|
||||||
|
else if (rp === '..') {
|
||||||
|
/* c8 ignore start */
|
||||||
|
const ep = e.parent || e;
|
||||||
|
/* c8 ignore stop */
|
||||||
|
this.subwalks.add(ep, rest);
|
||||||
|
}
|
||||||
|
else if (rp instanceof RegExp) {
|
||||||
|
this.testRegExp(e, rp, rest.rest(), absolute);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
testRegExp(e, p, rest, absolute) {
|
||||||
|
if (!p.test(e.name))
|
||||||
|
return;
|
||||||
|
if (!rest) {
|
||||||
|
this.matches.add(e, absolute, false);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this.subwalks.add(e, rest);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
testString(e, p, rest, absolute) {
|
||||||
|
// should never happen?
|
||||||
|
if (!e.isNamed(p))
|
||||||
|
return;
|
||||||
|
if (!rest) {
|
||||||
|
this.matches.add(e, absolute, false);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this.subwalks.add(e, rest);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.Processor = Processor;
|
||||||
|
//# sourceMappingURL=processor.js.map
|
||||||
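`Processor.child()` copies the `HasWalkedCache` so each directory level dedupes (path, pattern) pairs without new entries leaking back to its parent. A small sketch of that copy-on-child behaviour; the deep `dist/cjs` import path and the object stand-ins for `Path` and `Pattern` are assumptions for illustration:

// NOTE: internal module path; HasWalkedCache is exported by the file above.
import { HasWalkedCache } from 'glob/dist/cjs/processor.js';

// Stand-ins that satisfy the two methods HasWalkedCache actually calls.
const fakePath = (full: string) => ({ fullpath: () => full }) as any;
const fakePattern = (glob: string) => ({ globString: () => glob }) as any;

const parent = new HasWalkedCache();
parent.storeWalked(fakePath('/repo/src'), fakePattern('**/*.ts'));

// copy() clones the map, so new paths recorded on the child stay on the child.
const child = parent.copy();
child.storeWalked(fakePath('/repo/src/lib'), fakePattern('**/*.ts'));

console.log(parent.hasWalked(fakePath('/repo/src/lib'), fakePattern('**/*.ts'))); // undefined (falsy)
console.log(child.hasWalked(fakePath('/repo/src/lib'), fakePattern('**/*.ts')));  // true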
1
node_modules/glob/dist/cjs/processor.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
95
node_modules/glob/dist/cjs/walker.d.ts
generated
vendored
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
/// <reference types="node" />
|
||||||
|
/**
|
||||||
|
* Single-use utility classes to provide functionality to the {@link Glob}
|
||||||
|
* methods.
|
||||||
|
*
|
||||||
|
* @module
|
||||||
|
*/
|
||||||
|
import Minipass from 'minipass';
|
||||||
|
import { Path } from 'path-scurry';
|
||||||
|
import { IgnoreLike } from './ignore.js';
|
||||||
|
import { Pattern } from './pattern.js';
|
||||||
|
import { Processor } from './processor.js';
|
||||||
|
export interface GlobWalkerOpts {
|
||||||
|
absolute?: boolean;
|
||||||
|
allowWindowsEscape?: boolean;
|
||||||
|
cwd?: string | URL;
|
||||||
|
dot?: boolean;
|
||||||
|
dotRelative?: boolean;
|
||||||
|
follow?: boolean;
|
||||||
|
ignore?: string | string[] | IgnoreLike;
|
||||||
|
mark?: boolean;
|
||||||
|
matchBase?: boolean;
|
||||||
|
maxDepth?: number;
|
||||||
|
nobrace?: boolean;
|
||||||
|
nocase?: boolean;
|
||||||
|
nodir?: boolean;
|
||||||
|
noext?: boolean;
|
||||||
|
noglobstar?: boolean;
|
||||||
|
platform?: NodeJS.Platform;
|
||||||
|
realpath?: boolean;
|
||||||
|
root?: string;
|
||||||
|
stat?: boolean;
|
||||||
|
signal?: AbortSignal;
|
||||||
|
windowsPathsNoEscape?: boolean;
|
||||||
|
withFileTypes?: boolean;
|
||||||
|
}
|
||||||
|
export type GWOFileTypesTrue = GlobWalkerOpts & {
|
||||||
|
withFileTypes: true;
|
||||||
|
};
|
||||||
|
export type GWOFileTypesFalse = GlobWalkerOpts & {
|
||||||
|
withFileTypes: false;
|
||||||
|
};
|
||||||
|
export type GWOFileTypesUnset = GlobWalkerOpts & {
|
||||||
|
withFileTypes?: undefined;
|
||||||
|
};
|
||||||
|
export type Result<O extends GlobWalkerOpts> = O extends GWOFileTypesTrue ? Path : O extends GWOFileTypesFalse ? string : O extends GWOFileTypesUnset ? string : Path | string;
|
||||||
|
export type Matches<O extends GlobWalkerOpts> = O extends GWOFileTypesTrue ? Set<Path> : O extends GWOFileTypesFalse ? Set<string> : O extends GWOFileTypesUnset ? Set<string> : Set<Path | string>;
|
||||||
|
export type MatchStream<O extends GlobWalkerOpts> = O extends GWOFileTypesTrue ? Minipass<Path, Path> : O extends GWOFileTypesFalse ? Minipass<string, string> : O extends GWOFileTypesUnset ? Minipass<string, string> : Minipass<Path | string, Path | string>;
|
||||||
|
/**
|
||||||
|
* basic walking utilities that all the glob walker types use
|
||||||
|
*/
|
||||||
|
export declare abstract class GlobUtil<O extends GlobWalkerOpts = GlobWalkerOpts> {
|
||||||
|
#private;
|
||||||
|
path: Path;
|
||||||
|
patterns: Pattern[];
|
||||||
|
opts: O;
|
||||||
|
seen: Set<Path>;
|
||||||
|
paused: boolean;
|
||||||
|
aborted: boolean;
|
||||||
|
signal?: AbortSignal;
|
||||||
|
maxDepth: number;
|
||||||
|
constructor(patterns: Pattern[], path: Path, opts: O);
|
||||||
|
pause(): void;
|
||||||
|
resume(): void;
|
||||||
|
onResume(fn: () => any): void;
|
||||||
|
matchCheck(e: Path, ifDir: boolean): Promise<Path | undefined>;
|
||||||
|
matchCheckTest(e: Path | undefined, ifDir: boolean): Path | undefined;
|
||||||
|
matchCheckSync(e: Path, ifDir: boolean): Path | undefined;
|
||||||
|
abstract matchEmit(p: Result<O>): void;
|
||||||
|
abstract matchEmit(p: string | Path): void;
|
||||||
|
matchFinish(e: Path, absolute: boolean): void;
|
||||||
|
match(e: Path, absolute: boolean, ifDir: boolean): Promise<void>;
|
||||||
|
matchSync(e: Path, absolute: boolean, ifDir: boolean): void;
|
||||||
|
walkCB(target: Path, patterns: Pattern[], cb: () => any): void;
|
||||||
|
walkCB2(target: Path, patterns: Pattern[], processor: Processor, cb: () => any): any;
|
||||||
|
walkCB3(target: Path, entries: Path[], processor: Processor, cb: () => any): void;
|
||||||
|
walkCBSync(target: Path, patterns: Pattern[], cb: () => any): void;
|
||||||
|
walkCB2Sync(target: Path, patterns: Pattern[], processor: Processor, cb: () => any): any;
|
||||||
|
walkCB3Sync(target: Path, entries: Path[], processor: Processor, cb: () => any): void;
|
||||||
|
}
|
||||||
|
export declare class GlobWalker<O extends GlobWalkerOpts = GlobWalkerOpts> extends GlobUtil<O> {
|
||||||
|
matches: O extends GWOFileTypesTrue ? Set<Path> : O extends GWOFileTypesFalse ? Set<string> : O extends GWOFileTypesUnset ? Set<string> : Set<Path | string>;
|
||||||
|
constructor(patterns: Pattern[], path: Path, opts: O);
|
||||||
|
matchEmit(e: Result<O>): void;
|
||||||
|
walk(): Promise<Matches<O>>;
|
||||||
|
walkSync(): Matches<O>;
|
||||||
|
}
|
||||||
|
export declare class GlobStream<O extends GlobWalkerOpts = GlobWalkerOpts> extends GlobUtil<O> {
|
||||||
|
results: O extends GWOFileTypesTrue ? Minipass<Path, Path> : O extends GWOFileTypesFalse ? Minipass<string, string> : O extends GWOFileTypesUnset ? Minipass<string, string> : Minipass<Path | string, Path | string>;
|
||||||
|
constructor(patterns: Pattern[], path: Path, opts: O);
|
||||||
|
matchEmit(e: Result<O>): void;
|
||||||
|
stream(): MatchStream<O>;
|
||||||
|
streamSync(): MatchStream<O>;
|
||||||
|
}
|
||||||
|
//# sourceMappingURL=walker.d.ts.map
|
||||||
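`GlobWalkerOpts` above is the option bag every walker variant shares, and the conditional `Result`/`Matches`/`MatchStream` types thread `withFileTypes` through to the element type. A short sketch combining a few of these knobs through the public `glob()` entry point (pattern and values are illustrative):

import { glob } from 'glob';

(async () => {
  // Abort support, depth limiting, and an ignore pattern in one call.
  const ac = new AbortController();
  const matches = await glob('**/*.spec.ts', {
    signal: ac.signal,
    maxDepth: 4,
    ignore: 'node_modules/**',
    nodir: true,
  });
  console.log(matches.length);
})();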
1
node_modules/glob/dist/cjs/walker.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"walker.d.ts","sourceRoot":"","sources":["../../src/walker.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;AACH,OAAO,QAAQ,MAAM,UAAU,CAAA;AAC/B,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,EAAU,UAAU,EAAE,MAAM,aAAa,CAAA;AAOhD,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AACtC,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAA;AAE1C,MAAM,WAAW,cAAc;IAC7B,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAC5B,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAClB,GAAG,CAAC,EAAE,OAAO,CAAA;IACb,WAAW,CAAC,EAAE,OAAO,CAAA;IACrB,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,CAAC,EAAE,OAAO,CAAA;IAGnB,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,OAAO,CAAC,EAAE,OAAO,CAAA;IACjB,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,UAAU,CAAC,EAAE,OAAO,CAAA;IACpB,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAC1B,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAC9B,aAAa,CAAC,EAAE,OAAO,CAAA;CACxB;AAED,MAAM,MAAM,gBAAgB,GAAG,cAAc,GAAG;IAC9C,aAAa,EAAE,IAAI,CAAA;CACpB,CAAA;AACD,MAAM,MAAM,iBAAiB,GAAG,cAAc,GAAG;IAC/C,aAAa,EAAE,KAAK,CAAA;CACrB,CAAA;AACD,MAAM,MAAM,iBAAiB,GAAG,cAAc,GAAG;IAC/C,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,CAAC,SAAS,cAAc,IAAI,CAAC,SAAS,gBAAgB,GACrE,IAAI,GACJ,CAAC,SAAS,iBAAiB,GAC3B,MAAM,GACN,CAAC,SAAS,iBAAiB,GAC3B,MAAM,GACN,IAAI,GAAG,MAAM,CAAA;AAEjB,MAAM,MAAM,OAAO,CAAC,CAAC,SAAS,cAAc,IAAI,CAAC,SAAS,gBAAgB,GACtE,GAAG,CAAC,IAAI,CAAC,GACT,CAAC,SAAS,iBAAiB,GAC3B,GAAG,CAAC,MAAM,CAAC,GACX,CAAC,SAAS,iBAAiB,GAC3B,GAAG,CAAC,MAAM,CAAC,GACX,GAAG,CAAC,IAAI,GAAG,MAAM,CAAC,CAAA;AAEtB,MAAM,MAAM,WAAW,CAAC,CAAC,SAAS,cAAc,IAC9C,CAAC,SAAS,gBAAgB,GACtB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GACpB,CAAC,SAAS,iBAAiB,GAC3B,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,GACxB,CAAC,SAAS,iBAAiB,GAC3B,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,GACxB,QAAQ,CAAC,IAAI,GAAG,MAAM,EAAE,IAAI,GAAG,MAAM,CAAC,CAAA;AAY5C;;GAEG;AACH,8BAAsB,QAAQ,CAAC,CAAC,SAAS,cAAc,GAAG,cAAc;;IACtE,IAAI,EAAE,IAAI,CAAA;IACV,QAAQ,EAAE,OAAO,EAAE,CAAA;IACnB,IAAI,EAAE,CAAC,CAAA;IACP,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAkB;IACjC,MAAM,EAAE,OAAO,CAAQ;IACvB,OAAO,EAAE,OAAO,CAAQ;IAIxB,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,QAAQ,EAAE,MAAM,CAAA;gBAEJ,QAAQ,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IA8BpD,KAAK;IAGL,MAAM;IAUN,QAAQ,CAAC,EAAE,EAAE,MAAM,GAAG;IAahB,UAAU,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,GAAG,SAAS,CAAC;IAYpE,cAAc,CAAC,CAAC,EAAE,IAAI,GAAG,SAAS,EAAE,KAAK,EAAE,OAAO,GAAG,IAAI,GAAG,SAAS;IAUrE,cAAc,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,GAAG,IAAI,GAAG,SAAS;IAYzD,QAAQ,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI;IACtC,QAAQ,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,IAAI;IAE1C,WAAW,CAAC,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO;IAqBhC,KAAK,CAAC,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAKtE,SAAS,CAAC,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,GAAG,IAAI;IAK3D,MAAM,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,EAAE,EAAE,EAAE,MAAM,GAAG;IAOvD,OAAO,CACL,MAAM,EAAE,IAAI,EACZ,QAAQ,EAAE,OAAO,EAAE,EACnB,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;IA2Cf,OAAO,CACL,MAAM,EAAE,IAAI,EACZ,OAAO,EAAE,IAAI,EAAE,EACf,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;IAsBf,UAAU,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,EAAE,EAAE,EAAE,MAAM,GAAG;IAO3D,WAAW,CACT,MAAM,EAAE,IAAI,EACZ,QAAQ,EAAE,OAAO,EAAE,EACnB,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;IAqCf,WAAW,CACT,MAAM,EAAE,IAAI,EACZ,OAAO,EAAE,IAAI,EAAE,EACf,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;CAoBhB;AA
ED,qBAAa,UAAU,CACrB,CAAC,SAAS,cAAc,GAAG,cAAc,CACzC,SAAQ,QAAQ,CAAC,CAAC,CAAC;IACnB,OAAO,EAAE,CAAC,SAAS,gBAAgB,GAC/B,GAAG,CAAC,IAAI,CAAC,GACT,CAAC,SAAS,iBAAiB,GAC3B,GAAG,CAAC,MAAM,CAAC,GACX,CAAC,SAAS,iBAAiB,GAC3B,GAAG,CAAC,MAAM,CAAC,GACX,GAAG,CAAC,IAAI,GAAG,MAAM,CAAC,CAAA;gBAEV,QAAQ,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IAKpD,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI;IAKvB,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;IAiBjC,QAAQ,IAAI,OAAO,CAAC,CAAC,CAAC;CAWvB;AAED,qBAAa,UAAU,CACrB,CAAC,SAAS,cAAc,GAAG,cAAc,CACzC,SAAQ,QAAQ,CAAC,CAAC,CAAC;IACnB,OAAO,EAAE,CAAC,SAAS,gBAAgB,GAC/B,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GACpB,CAAC,SAAS,iBAAiB,GAC3B,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,GACxB,CAAC,SAAS,iBAAiB,GAC3B,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,GACxB,QAAQ,CAAC,IAAI,GAAG,MAAM,EAAE,IAAI,GAAG,MAAM,CAAC,CAAA;gBAE9B,QAAQ,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IAUpD,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI;IAM7B,MAAM,IAAI,WAAW,CAAC,CAAC,CAAC;IAgBxB,UAAU,IAAI,WAAW,CAAC,CAAC,CAAC;CAW7B"}
370
node_modules/glob/dist/cjs/walker.js
generated
vendored
Normal file
@@ -0,0 +1,370 @@
|
|||||||
|
"use strict";
|
||||||
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.GlobStream = exports.GlobWalker = exports.GlobUtil = void 0;
|
||||||
|
/**
|
||||||
|
* Single-use utility classes to provide functionality to the {@link Glob}
|
||||||
|
* methods.
|
||||||
|
*
|
||||||
|
* @module
|
||||||
|
*/
|
||||||
|
const minipass_1 = __importDefault(require("minipass"));
|
||||||
|
const ignore_js_1 = require("./ignore.js");
|
||||||
|
const processor_js_1 = require("./processor.js");
|
||||||
|
const makeIgnore = (ignore, opts) => typeof ignore === 'string'
|
||||||
|
? new ignore_js_1.Ignore([ignore], opts)
|
||||||
|
: Array.isArray(ignore)
|
||||||
|
? new ignore_js_1.Ignore(ignore, opts)
|
||||||
|
: ignore;
|
||||||
|
/**
|
||||||
|
* basic walking utilities that all the glob walker types use
|
||||||
|
*/
|
||||||
|
class GlobUtil {
|
||||||
|
path;
|
||||||
|
patterns;
|
||||||
|
opts;
|
||||||
|
seen = new Set();
|
||||||
|
paused = false;
|
||||||
|
aborted = false;
|
||||||
|
#onResume = [];
|
||||||
|
#ignore;
|
||||||
|
#sep;
|
||||||
|
signal;
|
||||||
|
maxDepth;
|
||||||
|
constructor(patterns, path, opts) {
|
||||||
|
this.patterns = patterns;
|
||||||
|
this.path = path;
|
||||||
|
this.opts = opts;
|
||||||
|
this.#sep = opts.platform === 'win32' ? '\\' : '/';
|
||||||
|
if (opts.ignore) {
|
||||||
|
this.#ignore = makeIgnore(opts.ignore, opts);
|
||||||
|
}
|
||||||
|
// ignore, always set with maxDepth, but it's optional on the
|
||||||
|
// GlobOptions type
|
||||||
|
/* c8 ignore start */
|
||||||
|
this.maxDepth = opts.maxDepth || Infinity;
|
||||||
|
/* c8 ignore stop */
|
||||||
|
if (opts.signal) {
|
||||||
|
this.signal = opts.signal;
|
||||||
|
this.signal.addEventListener('abort', () => {
|
||||||
|
this.#onResume.length = 0;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#ignored(path) {
|
||||||
|
return this.seen.has(path) || !!this.#ignore?.ignored?.(path);
|
||||||
|
}
|
||||||
|
#childrenIgnored(path) {
|
||||||
|
return !!this.#ignore?.childrenIgnored?.(path);
|
||||||
|
}
|
||||||
|
// backpressure mechanism
|
||||||
|
pause() {
|
||||||
|
this.paused = true;
|
||||||
|
}
|
||||||
|
resume() {
|
||||||
|
/* c8 ignore start */
|
||||||
|
if (this.signal?.aborted)
|
||||||
|
return;
|
||||||
|
/* c8 ignore stop */
|
||||||
|
this.paused = false;
|
||||||
|
let fn = undefined;
|
||||||
|
while (!this.paused && (fn = this.#onResume.shift())) {
|
||||||
|
fn();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
onResume(fn) {
|
||||||
|
if (this.signal?.aborted)
|
||||||
|
return;
|
||||||
|
/* c8 ignore start */
|
||||||
|
if (!this.paused) {
|
||||||
|
fn();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
/* c8 ignore stop */
|
||||||
|
this.#onResume.push(fn);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// do the requisite realpath/stat checking, and return the path
|
||||||
|
// to add or undefined to filter it out.
|
||||||
|
async matchCheck(e, ifDir) {
|
||||||
|
if (ifDir && this.opts.nodir)
|
||||||
|
return undefined;
|
||||||
|
let rpc;
|
||||||
|
if (this.opts.realpath) {
|
||||||
|
rpc = e.realpathCached() || (await e.realpath());
|
||||||
|
if (!rpc)
|
||||||
|
return undefined;
|
||||||
|
e = rpc;
|
||||||
|
}
|
||||||
|
const needStat = e.isUnknown() || this.opts.stat;
|
||||||
|
return this.matchCheckTest(needStat ? await e.lstat() : e, ifDir);
|
||||||
|
}
|
||||||
|
matchCheckTest(e, ifDir) {
|
||||||
|
return e &&
|
||||||
|
(this.maxDepth === Infinity || e.depth() <= this.maxDepth) &&
|
||||||
|
(!ifDir || e.canReaddir()) &&
|
||||||
|
(!this.opts.nodir || !e.isDirectory()) &&
|
||||||
|
!this.#ignored(e)
|
||||||
|
? e
|
||||||
|
: undefined;
|
||||||
|
}
|
||||||
|
matchCheckSync(e, ifDir) {
|
||||||
|
if (ifDir && this.opts.nodir)
|
||||||
|
return undefined;
|
||||||
|
let rpc;
|
||||||
|
if (this.opts.realpath) {
|
||||||
|
rpc = e.realpathCached() || e.realpathSync();
|
||||||
|
if (!rpc)
|
||||||
|
return undefined;
|
||||||
|
e = rpc;
|
||||||
|
}
|
||||||
|
const needStat = e.isUnknown() || this.opts.stat;
|
||||||
|
return this.matchCheckTest(needStat ? e.lstatSync() : e, ifDir);
|
||||||
|
}
|
||||||
|
matchFinish(e, absolute) {
|
||||||
|
if (this.#ignored(e))
|
||||||
|
return;
|
||||||
|
const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute;
|
||||||
|
this.seen.add(e);
|
||||||
|
const mark = this.opts.mark && e.isDirectory() ? this.#sep : '';
|
||||||
|
// ok, we have what we need!
|
||||||
|
if (this.opts.withFileTypes) {
|
||||||
|
this.matchEmit(e);
|
||||||
|
}
|
||||||
|
else if (abs) {
|
||||||
|
this.matchEmit(e.fullpath() + mark);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
const rel = e.relative();
|
||||||
|
const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep)
|
||||||
|
? '.' + this.#sep
|
||||||
|
: '';
|
||||||
|
this.matchEmit(!rel && mark ? '.' + mark : pre + rel + mark);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
async match(e, absolute, ifDir) {
|
||||||
|
const p = await this.matchCheck(e, ifDir);
|
||||||
|
if (p)
|
||||||
|
this.matchFinish(p, absolute);
|
||||||
|
}
|
||||||
|
matchSync(e, absolute, ifDir) {
|
||||||
|
const p = this.matchCheckSync(e, ifDir);
|
||||||
|
if (p)
|
||||||
|
this.matchFinish(p, absolute);
|
||||||
|
}
|
||||||
|
walkCB(target, patterns, cb) {
|
||||||
|
/* c8 ignore start */
|
||||||
|
if (this.signal?.aborted)
|
||||||
|
cb();
|
||||||
|
/* c8 ignore stop */
|
||||||
|
this.walkCB2(target, patterns, new processor_js_1.Processor(this.opts), cb);
|
||||||
|
}
|
||||||
|
walkCB2(target, patterns, processor, cb) {
|
||||||
|
if (this.#childrenIgnored(target))
|
||||||
|
return cb();
|
||||||
|
if (this.signal?.aborted)
|
||||||
|
cb();
|
||||||
|
if (this.paused) {
|
||||||
|
this.onResume(() => this.walkCB2(target, patterns, processor, cb));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
processor.processPatterns(target, patterns);
|
||||||
|
// done processing. all of the above is sync, can be abstracted out.
|
||||||
|
// subwalks is a map of paths to the entry filters they need
|
||||||
|
// matches is a map of paths to [absolute, ifDir] tuples.
|
||||||
|
let tasks = 1;
|
||||||
|
const next = () => {
|
||||||
|
if (--tasks === 0)
|
||||||
|
cb();
|
||||||
|
};
|
||||||
|
for (const [m, absolute, ifDir] of processor.matches.entries()) {
|
||||||
|
if (this.#ignored(m))
|
||||||
|
continue;
|
||||||
|
tasks++;
|
||||||
|
this.match(m, absolute, ifDir).then(() => next());
|
||||||
|
}
|
||||||
|
for (const t of processor.subwalkTargets()) {
|
||||||
|
if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
tasks++;
|
||||||
|
const childrenCached = t.readdirCached();
|
||||||
|
if (t.calledReaddir())
|
||||||
|
this.walkCB3(t, childrenCached, processor, next);
|
||||||
|
else {
|
||||||
|
t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
next();
|
||||||
|
}
|
||||||
|
walkCB3(target, entries, processor, cb) {
|
||||||
|
processor = processor.filterEntries(target, entries);
|
||||||
|
let tasks = 1;
|
||||||
|
const next = () => {
|
||||||
|
if (--tasks === 0)
|
||||||
|
cb();
|
||||||
|
};
|
||||||
|
for (const [m, absolute, ifDir] of processor.matches.entries()) {
|
||||||
|
if (this.#ignored(m))
|
||||||
|
continue;
|
||||||
|
tasks++;
|
||||||
|
this.match(m, absolute, ifDir).then(() => next());
|
||||||
|
}
|
||||||
|
for (const [target, patterns] of processor.subwalks.entries()) {
|
||||||
|
tasks++;
|
||||||
|
this.walkCB2(target, patterns, processor.child(), next);
|
||||||
|
}
|
||||||
|
next();
|
||||||
|
}
|
||||||
|
walkCBSync(target, patterns, cb) {
|
||||||
|
/* c8 ignore start */
|
||||||
|
if (this.signal?.aborted)
|
||||||
|
cb();
|
||||||
|
/* c8 ignore stop */
|
||||||
|
this.walkCB2Sync(target, patterns, new processor_js_1.Processor(this.opts), cb);
|
||||||
|
}
|
||||||
|
walkCB2Sync(target, patterns, processor, cb) {
|
||||||
|
if (this.#childrenIgnored(target))
|
||||||
|
return cb();
|
||||||
|
if (this.signal?.aborted)
|
||||||
|
cb();
|
||||||
|
if (this.paused) {
|
||||||
|
this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
processor.processPatterns(target, patterns);
|
||||||
|
// done processing. all of the above is sync, can be abstracted out.
|
||||||
|
// subwalks is a map of paths to the entry filters they need
|
||||||
|
// matches is a map of paths to [absolute, ifDir] tuples.
|
||||||
|
let tasks = 1;
|
||||||
|
const next = () => {
|
||||||
|
if (--tasks === 0)
|
||||||
|
cb();
|
||||||
|
};
|
||||||
|
for (const [m, absolute, ifDir] of processor.matches.entries()) {
|
||||||
|
if (this.#ignored(m))
|
||||||
|
continue;
|
||||||
|
this.matchSync(m, absolute, ifDir);
|
||||||
|
}
|
||||||
|
for (const t of processor.subwalkTargets()) {
|
||||||
|
if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
tasks++;
|
||||||
|
const children = t.readdirSync();
|
||||||
|
this.walkCB3Sync(t, children, processor, next);
|
||||||
|
}
|
||||||
|
next();
|
||||||
|
}
|
||||||
|
walkCB3Sync(target, entries, processor, cb) {
|
||||||
|
processor = processor.filterEntries(target, entries);
|
||||||
|
let tasks = 1;
|
||||||
|
const next = () => {
|
||||||
|
if (--tasks === 0)
|
||||||
|
cb();
|
||||||
|
};
|
||||||
|
for (const [m, absolute, ifDir] of processor.matches.entries()) {
|
||||||
|
if (this.#ignored(m))
|
||||||
|
continue;
|
||||||
|
this.matchSync(m, absolute, ifDir);
|
||||||
|
}
|
||||||
|
for (const [target, patterns] of processor.subwalks.entries()) {
|
||||||
|
tasks++;
|
||||||
|
this.walkCB2Sync(target, patterns, processor.child(), next);
|
||||||
|
}
|
||||||
|
next();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.GlobUtil = GlobUtil;
|
||||||
|
class GlobWalker extends GlobUtil {
|
||||||
|
matches;
|
||||||
|
constructor(patterns, path, opts) {
|
||||||
|
super(patterns, path, opts);
|
||||||
|
this.matches = new Set();
|
||||||
|
}
|
||||||
|
matchEmit(e) {
|
||||||
|
this.matches.add(e);
|
||||||
|
}
|
||||||
|
async walk() {
|
||||||
|
if (this.signal?.aborted)
|
||||||
|
throw this.signal.reason;
|
||||||
|
const t = this.path.isUnknown() ? await this.path.lstat() : this.path;
|
||||||
|
if (t) {
|
||||||
|
await new Promise((res, rej) => {
|
||||||
|
this.walkCB(t, this.patterns, () => {
|
||||||
|
if (this.signal?.aborted) {
|
||||||
|
rej(this.signal.reason);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
res(this.matches);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return this.matches;
|
||||||
|
}
|
||||||
|
walkSync() {
|
||||||
|
if (this.signal?.aborted)
|
||||||
|
throw this.signal.reason;
|
||||||
|
const t = this.path.isUnknown() ? this.path.lstatSync() : this.path;
|
||||||
|
// nothing for the callback to do, because this never pauses
|
||||||
|
if (t) {
|
||||||
|
this.walkCBSync(t, this.patterns, () => {
|
||||||
|
if (this.signal?.aborted)
|
||||||
|
throw this.signal.reason;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return this.matches;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.GlobWalker = GlobWalker;
|
||||||
|
class GlobStream extends GlobUtil {
|
||||||
|
results;
|
||||||
|
constructor(patterns, path, opts) {
|
||||||
|
super(patterns, path, opts);
|
||||||
|
this.results = new minipass_1.default({
|
||||||
|
signal: this.signal,
|
||||||
|
objectMode: true,
|
||||||
|
});
|
||||||
|
this.results.on('drain', () => this.resume());
|
||||||
|
this.results.on('resume', () => this.resume());
|
||||||
|
}
|
||||||
|
matchEmit(e) {
|
||||||
|
this.results.write(e);
|
||||||
|
if (!this.results.flowing)
|
||||||
|
this.pause();
|
||||||
|
}
|
||||||
|
stream() {
|
||||||
|
const target = this.path;
|
||||||
|
if (target.isUnknown()) {
|
||||||
|
target.lstat().then(e => {
|
||||||
|
if (e) {
|
||||||
|
this.walkCB(target, this.patterns, () => this.results.end());
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this.results.end();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this.walkCB(target, this.patterns, () => this.results.end());
|
||||||
|
}
|
||||||
|
return this.results;
|
||||||
|
}
|
||||||
|
streamSync() {
|
||||||
|
const target = this.path.isUnknown()
|
||||||
|
? this.path.lstatSync()
|
||||||
|
: this.path;
|
||||||
|
if (target) {
|
||||||
|
this.walkCBSync(target, this.patterns, () => this.results.end());
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this.results.end();
|
||||||
|
}
|
||||||
|
return this.results;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.GlobStream = GlobStream;
|
||||||
|
//# sourceMappingURL=walker.js.map
|
||||||
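
The GlobStream walker above implements backpressure: matchEmit() writes each result to a Minipass stream and pauses the directory walk whenever that stream stops flowing, and the 'drain'/'resume' handlers wired up in the constructor restart it. A minimal consumer sketch, assuming glob v9 is consumed as the package named 'glob' from an ESM module; the pattern and delay are illustrative and not taken from this repository:

import { globStream } from 'glob'
import { setTimeout as sleep } from 'node:timers/promises'

async function main() {
  const matches = globStream('**/*.js', { nodir: true })
  for await (const file of matches) {
    // A deliberately slow consumer: while we wait here the results stream
    // is not flowing, so the walker pauses instead of buffering the whole
    // tree, and resumes once the stream drains.
    await sleep(25)
    console.log(file)
  }
}

main()
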
1
node_modules/glob/dist/cjs/walker.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
328
node_modules/glob/dist/mjs/glob.d.ts
generated
vendored
Normal file
@@ -0,0 +1,328 @@
|
|||||||
|
/// <reference types="node" />
|
||||||
|
import { Minimatch } from 'minimatch';
|
||||||
|
import Minipass from 'minipass';
|
||||||
|
import { FSOption, Path, PathScurry } from 'path-scurry';
|
||||||
|
import { IgnoreLike } from './ignore.js';
|
||||||
|
import { Pattern } from './pattern.js';
|
||||||
|
export type MatchSet = Minimatch['set'];
|
||||||
|
export type GlobParts = Exclude<Minimatch['globParts'], undefined>;
|
||||||
|
/**
|
||||||
|
* A `GlobOptions` object may be provided to any of the exported methods, and
|
||||||
|
* must be provided to the `Glob` constructor.
|
||||||
|
*
|
||||||
|
* All options are optional, boolean, and false by default, unless otherwise
|
||||||
|
* noted.
|
||||||
|
*
|
||||||
|
* All resolved options are added to the Glob object as properties.
|
||||||
|
*
|
||||||
|
* If you are running many `glob` operations, you can pass a Glob object as the
|
||||||
|
* `options` argument to a subsequent operation to share the previously loaded
|
||||||
|
* cache.
|
||||||
|
*/
|
||||||
|
export interface GlobOptions {
|
||||||
|
/**
|
||||||
|
* Set to `true` to always receive absolute paths for
|
||||||
|
* matched files. Set to `false` to always return relative paths.
|
||||||
|
*
|
||||||
|
* When this option is not set, absolute paths are returned for patterns
|
||||||
|
* that are absolute, and otherwise paths are returned that are relative
|
||||||
|
* to the `cwd` setting.
|
||||||
|
*
|
||||||
|
* This does _not_ make an extra system call to get
|
||||||
|
* the realpath, it only does string path resolution.
|
||||||
|
*
|
||||||
|
* Conflicts with {@link withFileTypes}
|
||||||
|
*/
|
||||||
|
absolute?: boolean;
|
||||||
|
/**
|
||||||
|
* Set to false to enable {@link windowsPathsNoEscape}
|
||||||
|
*
|
||||||
|
* @deprecated
|
||||||
|
*/
|
||||||
|
allowWindowsEscape?: boolean;
|
||||||
|
/**
|
||||||
|
* The current working directory in which to search. Defaults to
|
||||||
|
* `process.cwd()`.
|
||||||
|
*
|
||||||
|
* May be either a string path or a `file://` URL object or string.
|
||||||
|
*/
|
||||||
|
cwd?: string | URL;
|
||||||
|
/**
|
||||||
|
* Include `.dot` files in normal matches and `globstar`
|
||||||
|
* matches. Note that an explicit dot in a portion of the pattern
|
||||||
|
* will always match dot files.
|
||||||
|
*/
|
||||||
|
dot?: boolean;
|
||||||
|
/**
|
||||||
|
* Prepend all relative path strings with `./` (or `.\` on Windows).
|
||||||
|
*
|
||||||
|
* Without this option, returned relative paths are "bare", so instead of
|
||||||
|
* returning `'./foo/bar'`, they are returned as `'foo/bar'`.
|
||||||
|
*
|
||||||
|
* Relative patterns starting with `'../'` are not prepended with `./`, even
|
||||||
|
* if this option is set.
|
||||||
|
*/
|
||||||
|
dotRelative?: boolean;
|
||||||
|
/**
|
||||||
|
* Follow symlinked directories when expanding `**`
|
||||||
|
* patterns. This can result in a lot of duplicate references in
|
||||||
|
* the presence of cyclic links, and make performance quite bad.
|
||||||
|
*
|
||||||
|
* By default, a `**` in a pattern will follow 1 symbolic link if
|
||||||
|
* it is not the first item in the pattern, or none if it is the
|
||||||
|
* first item in the pattern, following the same behavior as Bash.
|
||||||
|
*/
|
||||||
|
follow?: boolean;
|
||||||
|
/**
|
||||||
|
* string or string[], or an object with `ignore` and `ignoreChildren`
|
||||||
|
* methods.
|
||||||
|
*
|
||||||
|
* If a string or string[] is provided, then this is treated as a glob
|
||||||
|
* pattern or array of glob patterns to exclude from matches. To ignore all
|
||||||
|
* children within a directory, as well as the entry itself, append `'/**'`
|
||||||
|
* to the ignore pattern.
|
||||||
|
*
|
||||||
|
* **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of
|
||||||
|
* any other settings.
|
||||||
|
*
|
||||||
|
* If an object is provided that has `ignored(path)` and/or
|
||||||
|
* `childrenIgnored(path)` methods, then these methods will be called to
|
||||||
|
* determine whether any Path is a match or if its children should be
|
||||||
|
* traversed, respectively.
|
||||||
|
*/
|
||||||
|
ignore?: string | string[] | IgnoreLike;
|
||||||
|
/**
|
||||||
|
* Treat brace expansion like `{a,b}` as a "magic" pattern. Has no
|
||||||
|
* effect if {@link nobrace} is set.
|
||||||
|
*
|
||||||
|
* Only has effect on the {@link hasMagic} function.
|
||||||
|
*/
|
||||||
|
magicalBraces?: boolean;
|
||||||
|
/**
|
||||||
|
* Add a `/` character to directory matches. Note that this requires
|
||||||
|
* additional stat calls in some cases.
|
||||||
|
*/
|
||||||
|
mark?: boolean;
|
||||||
|
/**
|
||||||
|
* Perform a basename-only match if the pattern does not contain any slash
|
||||||
|
* characters. That is, `*.js` would be treated as equivalent to
|
||||||
|
* `**\/*.js`, matching all js files in all directories.
|
||||||
|
*/
|
||||||
|
matchBase?: boolean;
|
||||||
|
/**
|
||||||
|
* Limit the directory traversal to a given depth below the cwd.
|
||||||
|
* Note that this does NOT prevent traversal to sibling folders,
|
||||||
|
* root patterns, and so on. It only limits the maximum folder depth
|
||||||
|
* that the walk will descend, relative to the cwd.
|
||||||
|
*/
|
||||||
|
maxDepth?: number;
|
||||||
|
/**
|
||||||
|
* Do not expand `{a,b}` and `{1..3}` brace sets.
|
||||||
|
*/
|
||||||
|
nobrace?: boolean;
|
||||||
|
/**
|
||||||
|
* Perform a case-insensitive match. This defaults to `true` on macOS and
|
||||||
|
* Windows systems, and `false` on all others.
|
||||||
|
*
|
||||||
|
* **Note** `nocase` should only be explicitly set when it is
|
||||||
|
* known that the filesystem's case sensitivity differs from the
|
||||||
|
* platform default. If set `true` on case-sensitive file
|
||||||
|
* systems, or `false` on case-insensitive file systems, then the
|
||||||
|
* walk may return more or less results than expected.
|
||||||
|
*/
|
||||||
|
nocase?: boolean;
|
||||||
|
/**
|
||||||
|
* Do not match directories, only files. (Note: to match
|
||||||
|
* _only_ directories, put a `/` at the end of the pattern.)
|
||||||
|
*/
|
||||||
|
nodir?: boolean;
|
||||||
|
/**
|
||||||
|
* Do not match "extglob" patterns such as `+(a|b)`.
|
||||||
|
*/
|
||||||
|
noext?: boolean;
|
||||||
|
/**
|
||||||
|
* Do not match `**` against multiple filenames. (Ie, treat it as a normal
|
||||||
|
* `*` instead.)
|
||||||
|
*
|
||||||
|
* Conflicts with {@link matchBase}
|
||||||
|
*/
|
||||||
|
noglobstar?: boolean;
|
||||||
|
/**
|
||||||
|
* Defaults to value of `process.platform` if available, or `'linux'` if
|
||||||
|
* not. Setting `platform:'win32'` on non-Windows systems may cause strange
|
||||||
|
* behavior.
|
||||||
|
*/
|
||||||
|
platform?: NodeJS.Platform;
|
||||||
|
/**
|
||||||
|
* Set to true to call `fs.realpath` on all of the
|
||||||
|
* results. In the case of an entry that cannot be resolved, the
|
||||||
|
* entry is omitted. This incurs a slight performance penalty, of
|
||||||
|
* course, because of the added system calls.
|
||||||
|
*/
|
||||||
|
realpath?: boolean;
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
* A string path resolved against the `cwd` option, which
|
||||||
|
* is used as the starting point for absolute patterns that start
|
||||||
|
* with `/`, (but not drive letters or UNC paths on Windows).
|
||||||
|
*
|
||||||
|
* Note that this _doesn't_ necessarily limit the walk to the
|
||||||
|
* `root` directory, and doesn't affect the cwd starting point for
|
||||||
|
* non-absolute patterns. A pattern containing `..` will still be
|
||||||
|
* able to traverse out of the root directory, if it is not an
|
||||||
|
* actual root directory on the filesystem, and any non-absolute
|
||||||
|
* patterns will be matched in the `cwd`. For example, the
|
||||||
|
* pattern `/../*` with `{root:'/some/path'}` will return all
|
||||||
|
* files in `/some`, not all files in `/some/path`. The pattern
|
||||||
|
* `*` with `{root:'/some/path'}` will return all the entries in
|
||||||
|
* the cwd, not the entries in `/some/path`.
|
||||||
|
*
|
||||||
|
* To start absolute and non-absolute patterns in the same
|
||||||
|
* path, you can use `{root:''}`. However, be aware that on
|
||||||
|
* Windows systems, a pattern like `x:/*` or `//host/share/*` will
|
||||||
|
* _always_ start in the `x:/` or `//host/share` directory,
|
||||||
|
* regardless of the `root` setting.
|
||||||
|
*/
|
||||||
|
root?: string;
|
||||||
|
/**
|
||||||
|
* A [PathScurry](http://npm.im/path-scurry) object used
|
||||||
|
* to traverse the file system. If the `nocase` option is set
|
||||||
|
* explicitly, then any provided `scurry` object must match this
|
||||||
|
* setting.
|
||||||
|
*/
|
||||||
|
scurry?: PathScurry;
|
||||||
|
/**
|
||||||
|
* Call `lstat()` on all entries, whether required or not to determine
|
||||||
|
* whether it's a valid match. When used with {@link withFileTypes}, this
|
||||||
|
* means that matches will include data such as modified time, permissions,
|
||||||
|
* and so on. Note that this will incur a performance cost due to the added
|
||||||
|
* system calls.
|
||||||
|
*/
|
||||||
|
stat?: boolean;
|
||||||
|
/**
|
||||||
|
* An AbortSignal which will cancel the Glob walk when
|
||||||
|
* triggered.
|
||||||
|
*/
|
||||||
|
signal?: AbortSignal;
|
||||||
|
/**
|
||||||
|
* Use `\\` as a path separator _only_, and
|
||||||
|
* _never_ as an escape character. If set, all `\\` characters are
|
||||||
|
* replaced with `/` in the pattern.
|
||||||
|
*
|
||||||
|
* Note that this makes it **impossible** to match against paths
|
||||||
|
* containing literal glob pattern characters, but allows matching
|
||||||
|
* with patterns constructed using `path.join()` and
|
||||||
|
* `path.resolve()` on Windows platforms, mimicking the (buggy!)
|
||||||
|
* behavior of Glob v7 and before on Windows. Please use with
|
||||||
|
* caution, and be mindful of [the caveat below about Windows
|
||||||
|
* paths](#windows). (For legacy reasons, this is also set if
|
||||||
|
* `allowWindowsEscape` is set to the exact value `false`.)
|
||||||
|
*/
|
||||||
|
windowsPathsNoEscape?: boolean;
|
||||||
|
/**
|
||||||
|
* Return [PathScurry](http://npm.im/path-scurry)
|
||||||
|
* `Path` objects instead of strings. These are similar to a
|
||||||
|
* NodeJS `Dirent` object, but with additional methods and
|
||||||
|
* properties.
|
||||||
|
*
|
||||||
|
* Conflicts with {@link absolute}
|
||||||
|
*/
|
||||||
|
withFileTypes?: boolean;
|
||||||
|
/**
|
||||||
|
* An fs implementation to override some or all of the defaults. See
|
||||||
|
* http://npm.im/path-scurry for details about what can be overridden.
|
||||||
|
*/
|
||||||
|
fs?: FSOption;
|
||||||
|
}
|
||||||
|
export type GlobOptionsWithFileTypesTrue = GlobOptions & {
|
||||||
|
withFileTypes: true;
|
||||||
|
absolute?: undefined;
|
||||||
|
};
|
||||||
|
export type GlobOptionsWithFileTypesFalse = GlobOptions & {
|
||||||
|
withFileTypes?: false;
|
||||||
|
};
|
||||||
|
export type GlobOptionsWithFileTypesUnset = GlobOptions & {
|
||||||
|
withFileTypes?: undefined;
|
||||||
|
};
|
||||||
|
export type Result<Opts> = Opts extends GlobOptionsWithFileTypesTrue ? Path : Opts extends GlobOptionsWithFileTypesFalse ? string : Opts extends GlobOptionsWithFileTypesUnset ? string : string | Path;
|
||||||
|
export type Results<Opts> = Result<Opts>[];
|
||||||
|
export type FileTypes<Opts> = Opts extends GlobOptionsWithFileTypesTrue ? true : Opts extends GlobOptionsWithFileTypesFalse ? false : Opts extends GlobOptionsWithFileTypesUnset ? false : boolean;
|
||||||
|
/**
|
||||||
|
* An object that can perform glob pattern traversals.
|
||||||
|
*/
|
||||||
|
export declare class Glob<Opts extends GlobOptions> implements GlobOptions {
|
||||||
|
absolute?: boolean;
|
||||||
|
cwd: string;
|
||||||
|
root?: string;
|
||||||
|
dot: boolean;
|
||||||
|
dotRelative: boolean;
|
||||||
|
follow: boolean;
|
||||||
|
ignore?: string | string[] | IgnoreLike;
|
||||||
|
magicalBraces: boolean;
|
||||||
|
mark?: boolean;
|
||||||
|
matchBase: boolean;
|
||||||
|
maxDepth: number;
|
||||||
|
nobrace: boolean;
|
||||||
|
nocase: boolean;
|
||||||
|
nodir: boolean;
|
||||||
|
noext: boolean;
|
||||||
|
noglobstar: boolean;
|
||||||
|
pattern: string[];
|
||||||
|
platform: NodeJS.Platform;
|
||||||
|
realpath: boolean;
|
||||||
|
scurry: PathScurry;
|
||||||
|
stat: boolean;
|
||||||
|
signal?: AbortSignal;
|
||||||
|
windowsPathsNoEscape: boolean;
|
||||||
|
withFileTypes: FileTypes<Opts>;
|
||||||
|
/**
|
||||||
|
* The options provided to the constructor.
|
||||||
|
*/
|
||||||
|
opts: Opts;
|
||||||
|
/**
|
||||||
|
* An array of parsed immutable {@link Pattern} objects.
|
||||||
|
*/
|
||||||
|
patterns: Pattern[];
|
||||||
|
/**
|
||||||
|
* All options are stored as properties on the `Glob` object.
|
||||||
|
*
|
||||||
|
* See {@link GlobOptions} for full options descriptions.
|
||||||
|
*
|
||||||
|
* Note that a previous `Glob` object can be passed as the
|
||||||
|
* `GlobOptions` to another `Glob` instantiation to re-use settings
|
||||||
|
* and caches with a new pattern.
|
||||||
|
*
|
||||||
|
* Traversal functions can be called multiple times to run the walk
|
||||||
|
* again.
|
||||||
|
*/
|
||||||
|
constructor(pattern: string | string[], opts: Opts);
|
||||||
|
/**
|
||||||
|
* Returns a Promise that resolves to the results array.
|
||||||
|
*/
|
||||||
|
walk(): Promise<Results<Opts>>;
|
||||||
|
/**
|
||||||
|
* synchronous {@link Glob.walk}
|
||||||
|
*/
|
||||||
|
walkSync(): Results<Opts>;
|
||||||
|
/**
|
||||||
|
* Stream results asynchronously.
|
||||||
|
*/
|
||||||
|
stream(): Minipass<Result<Opts>, Result<Opts>>;
|
||||||
|
/**
|
||||||
|
* Stream results synchronously.
|
||||||
|
*/
|
||||||
|
streamSync(): Minipass<Result<Opts>, Result<Opts>>;
|
||||||
|
/**
|
||||||
|
* Default sync iteration function. Returns a Generator that
|
||||||
|
* iterates over the results.
|
||||||
|
*/
|
||||||
|
iterateSync(): Generator<Result<Opts>, void, void>;
|
||||||
|
[Symbol.iterator](): Generator<Result<Opts>, void, void>;
|
||||||
|
/**
|
||||||
|
* Default async iteration function. Returns an AsyncGenerator that
|
||||||
|
* iterates over the results.
|
||||||
|
*/
|
||||||
|
iterate(): AsyncGenerator<Result<Opts>, void, void>;
|
||||||
|
[Symbol.asyncIterator](): AsyncGenerator<Result<Opts>, void, void>;
|
||||||
|
}
|
||||||
|
//# sourceMappingURL=glob.d.ts.map
|
||||||
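
A small sketch exercising several of the options documented above through the public glob() entry point. It assumes glob v9 imported as 'glob' in an ESM module with top-level await; every pattern and value is illustrative only, and note that withFileTypes:true cannot be combined with absolute, as the docs state:

import { glob } from 'glob'

const files = await glob('src/**/*.ts', {
  cwd: process.cwd(),      // string path or file:// URL
  dot: false,              // skip dotfiles unless the pattern names them
  dotRelative: true,       // prefix relative results with './'
  nodir: true,             // match files only
  follow: false,           // do not expand '**' through symlinked dirs
  ignore: ['**/node_modules/**', '**/*.test.ts'],
  maxDepth: 8,             // stop descending 8 levels below cwd
})
console.log(files)
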
1
node_modules/glob/dist/mjs/glob.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"glob.d.ts","sourceRoot":"","sources":["../../src/glob.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AACvD,OAAO,QAAQ,MAAM,UAAU,CAAA;AAC/B,OAAO,EACL,QAAQ,EACR,IAAI,EACJ,UAAU,EAIX,MAAM,aAAa,CAAA;AAEpB,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAGtC,MAAM,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,CAAA;AACvC,MAAM,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,EAAE,SAAS,CAAC,CAAA;AAWlE;;;;;;;;;;;;GAYG;AACH,MAAM,WAAW,WAAW;IAC1B;;;;;;;;;;;;OAYG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAE5B;;;;;OAKG;IACH,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAElB;;;;OAIG;IACH,GAAG,CAAC,EAAE,OAAO,CAAA;IAEb;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB;;;;;;;;OAQG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;;;;;;;;;;;;;;OAgBG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IAEvC;;;;;OAKG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;;OAIG;IACH,SAAS,CAAC,EAAE,OAAO,CAAA;IAEnB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IAEjB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAA;IAEjB;;;;;;;;;OASG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;OAKG;IACH,UAAU,CAAC,EAAE,OAAO,CAAA;IAEpB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAE1B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,IAAI,CAAC,EAAE,MAAM,CAAA;IAEb;;;;;OAKG;IACH,MAAM,CAAC,EAAE,UAAU,CAAA;IAEnB;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;OAGG;IACH,MAAM,CAAC,EAAE,WAAW,CAAA;IAEpB;;;;;;;;;;;;;OAaG;IACH,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAE9B;;;;;;;OAOG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,EAAE,CAAC,EAAE,QAAQ,CAAA;CACd;AAED,MAAM,MAAM,4BAA4B,GAAG,WAAW,GAAG;IACvD,aAAa,EAAE,IAAI,CAAA;IACnB,QAAQ,CAAC,EAAE,SAAS,CAAA;CACrB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,KAAK,CAAA;CACtB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GAChE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,MAAM,GAAG,IAAI,CAAA;AACjB,MAAM,MAAM,OAAO,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,EAAE,CAAA;AAE1C,MAAM,MAAM,SAAS,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GACnE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,OAAO,CAAA;AAEX;;GAEG;AACH,qBAAa,IAAI,CAAC,IAAI,SAAS,WAAW,CAAE,YAAW,WAAW;IAChE,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,OAAO,CAAA;IACZ,WAAW,EAAE,OAAO,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,aAAa,EAAE,OAAO,CAAA;IACtB,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,EAAE,OAAO,CAAA;IAClB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;IAChB,MAAM,EAAE,OAAO,CAAA;IACf,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,OAAO,CAAA;IACd,UAAU,EAAE,OAAO,CAAA;IACnB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAA;IACzB,QAAQ,EAAE,OAAO,CAAA;IACjB,MAAM,EAAE,UAAU,CAAA;IAClB,IAAI,EAAE,OAAO,CAAA;IACb,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,EAAE,OAAO,CAAA;IAC7B,aAAa,EAAE,SAAS,CAAC,IAAI,CAAC,CAAA;IAE9B;;OAEG;IACH,IAAI,EAAE,IAAI,CAAA;IAEV;;OAEG;IACH,QAAQ,EAAE,OAAO,EAAE,CAAA;IAEnB;;;;;;;;;;;OAWG;gBACS,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI;IA6GlD;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAmBpC;;OAEG;IACH,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAezB;;OAEG;IACH,MAAM,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAa9C;;OAEG;IACH,UAAU,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAalD;;;OAGG;IACH,WAAW,IAAI
,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGlD,CAAC,MAAM,CAAC,QAAQ,CAAC;IAIjB;;;OAGG;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGnD,CAAC,MAAM,CAAC,aAAa,CAAC;CAGvB"}
224
node_modules/glob/dist/mjs/glob.js
generated
vendored
Normal file
@@ -0,0 +1,224 @@
|
|||||||
|
import { Minimatch } from 'minimatch';
|
||||||
|
import { PathScurry, PathScurryDarwin, PathScurryPosix, PathScurryWin32, } from 'path-scurry';
|
||||||
|
import { fileURLToPath } from 'url';
|
||||||
|
import { Pattern } from './pattern.js';
|
||||||
|
import { GlobStream, GlobWalker } from './walker.js';
|
||||||
|
// if no process global, just call it linux.
|
||||||
|
// so we default to case-sensitive, / separators
|
||||||
|
const defaultPlatform = typeof process === 'object' &&
|
||||||
|
process &&
|
||||||
|
typeof process.platform === 'string'
|
||||||
|
? process.platform
|
||||||
|
: 'linux';
|
||||||
|
/**
|
||||||
|
* An object that can perform glob pattern traversals.
|
||||||
|
*/
|
||||||
|
export class Glob {
|
||||||
|
absolute;
|
||||||
|
cwd;
|
||||||
|
root;
|
||||||
|
dot;
|
||||||
|
dotRelative;
|
||||||
|
follow;
|
||||||
|
ignore;
|
||||||
|
magicalBraces;
|
||||||
|
mark;
|
||||||
|
matchBase;
|
||||||
|
maxDepth;
|
||||||
|
nobrace;
|
||||||
|
nocase;
|
||||||
|
nodir;
|
||||||
|
noext;
|
||||||
|
noglobstar;
|
||||||
|
pattern;
|
||||||
|
platform;
|
||||||
|
realpath;
|
||||||
|
scurry;
|
||||||
|
stat;
|
||||||
|
signal;
|
||||||
|
windowsPathsNoEscape;
|
||||||
|
withFileTypes;
|
||||||
|
/**
|
||||||
|
* The options provided to the constructor.
|
||||||
|
*/
|
||||||
|
opts;
|
||||||
|
/**
|
||||||
|
* An array of parsed immutable {@link Pattern} objects.
|
||||||
|
*/
|
||||||
|
patterns;
|
||||||
|
/**
|
||||||
|
* All options are stored as properties on the `Glob` object.
|
||||||
|
*
|
||||||
|
* See {@link GlobOptions} for full options descriptions.
|
||||||
|
*
|
||||||
|
* Note that a previous `Glob` object can be passed as the
|
||||||
|
* `GlobOptions` to another `Glob` instantiation to re-use settings
|
||||||
|
* and caches with a new pattern.
|
||||||
|
*
|
||||||
|
* Traversal functions can be called multiple times to run the walk
|
||||||
|
* again.
|
||||||
|
*/
|
||||||
|
constructor(pattern, opts) {
|
||||||
|
this.withFileTypes = !!opts.withFileTypes;
|
||||||
|
this.signal = opts.signal;
|
||||||
|
this.follow = !!opts.follow;
|
||||||
|
this.dot = !!opts.dot;
|
||||||
|
this.dotRelative = !!opts.dotRelative;
|
||||||
|
this.nodir = !!opts.nodir;
|
||||||
|
this.mark = !!opts.mark;
|
||||||
|
if (!opts.cwd) {
|
||||||
|
this.cwd = '';
|
||||||
|
}
|
||||||
|
else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {
|
||||||
|
opts.cwd = fileURLToPath(opts.cwd);
|
||||||
|
}
|
||||||
|
this.cwd = opts.cwd || '';
|
||||||
|
this.root = opts.root;
|
||||||
|
this.magicalBraces = !!opts.magicalBraces;
|
||||||
|
this.nobrace = !!opts.nobrace;
|
||||||
|
this.noext = !!opts.noext;
|
||||||
|
this.realpath = !!opts.realpath;
|
||||||
|
this.absolute = opts.absolute;
|
||||||
|
this.noglobstar = !!opts.noglobstar;
|
||||||
|
this.matchBase = !!opts.matchBase;
|
||||||
|
this.maxDepth =
|
||||||
|
typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity;
|
||||||
|
this.stat = !!opts.stat;
|
||||||
|
this.ignore = opts.ignore;
|
||||||
|
if (this.withFileTypes && this.absolute !== undefined) {
|
||||||
|
throw new Error('cannot set absolute and withFileTypes:true');
|
||||||
|
}
|
||||||
|
if (typeof pattern === 'string') {
|
||||||
|
pattern = [pattern];
|
||||||
|
}
|
||||||
|
this.windowsPathsNoEscape =
|
||||||
|
!!opts.windowsPathsNoEscape ||
|
||||||
|
opts.allowWindowsEscape === false;
|
||||||
|
if (this.windowsPathsNoEscape) {
|
||||||
|
pattern = pattern.map(p => p.replace(/\\/g, '/'));
|
||||||
|
}
|
||||||
|
if (this.matchBase) {
|
||||||
|
if (opts.noglobstar) {
|
||||||
|
throw new TypeError('base matching requires globstar');
|
||||||
|
}
|
||||||
|
pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`));
|
||||||
|
}
|
||||||
|
this.pattern = pattern;
|
||||||
|
this.platform = opts.platform || defaultPlatform;
|
||||||
|
this.opts = { ...opts, platform: this.platform };
|
||||||
|
if (opts.scurry) {
|
||||||
|
this.scurry = opts.scurry;
|
||||||
|
if (opts.nocase !== undefined &&
|
||||||
|
opts.nocase !== opts.scurry.nocase) {
|
||||||
|
throw new Error('nocase option contradicts provided scurry option');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
const Scurry = opts.platform === 'win32'
|
||||||
|
? PathScurryWin32
|
||||||
|
: opts.platform === 'darwin'
|
||||||
|
? PathScurryDarwin
|
||||||
|
: opts.platform
|
||||||
|
? PathScurryPosix
|
||||||
|
: PathScurry;
|
||||||
|
this.scurry = new Scurry(this.cwd, {
|
||||||
|
nocase: opts.nocase,
|
||||||
|
fs: opts.fs,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
this.nocase = this.scurry.nocase;
|
||||||
|
const mmo = {
|
||||||
|
// default nocase based on platform
|
||||||
|
...opts,
|
||||||
|
dot: this.dot,
|
||||||
|
matchBase: this.matchBase,
|
||||||
|
nobrace: this.nobrace,
|
||||||
|
nocase: this.nocase,
|
||||||
|
nocaseMagicOnly: true,
|
||||||
|
nocomment: true,
|
||||||
|
noext: this.noext,
|
||||||
|
nonegate: true,
|
||||||
|
optimizationLevel: 2,
|
||||||
|
platform: this.platform,
|
||||||
|
windowsPathsNoEscape: this.windowsPathsNoEscape,
|
||||||
|
};
|
||||||
|
const mms = this.pattern.map(p => new Minimatch(p, mmo));
|
||||||
|
const [matchSet, globParts] = mms.reduce((set, m) => {
|
||||||
|
set[0].push(...m.set);
|
||||||
|
set[1].push(...m.globParts);
|
||||||
|
return set;
|
||||||
|
}, [[], []]);
|
||||||
|
this.patterns = matchSet.map((set, i) => {
|
||||||
|
return new Pattern(set, globParts[i], 0, this.platform);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
async walk() {
|
||||||
|
// Walkers always return array of Path objects, so we just have to
|
||||||
|
// coerce them into the right shape. It will have already called
|
||||||
|
// realpath() if the option was set to do so, so we know that's cached.
|
||||||
|
// start out knowing the cwd, at least
|
||||||
|
return [
|
||||||
|
...(await new GlobWalker(this.patterns, this.scurry.cwd, {
|
||||||
|
...this.opts,
|
||||||
|
maxDepth: this.maxDepth !== Infinity
|
||||||
|
? this.maxDepth + this.scurry.cwd.depth()
|
||||||
|
: Infinity,
|
||||||
|
platform: this.platform,
|
||||||
|
nocase: this.nocase,
|
||||||
|
}).walk()),
|
||||||
|
];
|
||||||
|
}
|
||||||
|
walkSync() {
|
||||||
|
return [
|
||||||
|
...new GlobWalker(this.patterns, this.scurry.cwd, {
|
||||||
|
...this.opts,
|
||||||
|
maxDepth: this.maxDepth !== Infinity
|
||||||
|
? this.maxDepth + this.scurry.cwd.depth()
|
||||||
|
: Infinity,
|
||||||
|
platform: this.platform,
|
||||||
|
nocase: this.nocase,
|
||||||
|
}).walkSync(),
|
||||||
|
];
|
||||||
|
}
|
||||||
|
stream() {
|
||||||
|
return new GlobStream(this.patterns, this.scurry.cwd, {
|
||||||
|
...this.opts,
|
||||||
|
maxDepth: this.maxDepth !== Infinity
|
||||||
|
? this.maxDepth + this.scurry.cwd.depth()
|
||||||
|
: Infinity,
|
||||||
|
platform: this.platform,
|
||||||
|
nocase: this.nocase,
|
||||||
|
}).stream();
|
||||||
|
}
|
||||||
|
streamSync() {
|
||||||
|
return new GlobStream(this.patterns, this.scurry.cwd, {
|
||||||
|
...this.opts,
|
||||||
|
maxDepth: this.maxDepth !== Infinity
|
||||||
|
? this.maxDepth + this.scurry.cwd.depth()
|
||||||
|
: Infinity,
|
||||||
|
platform: this.platform,
|
||||||
|
nocase: this.nocase,
|
||||||
|
}).streamSync();
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Default sync iteration function. Returns a Generator that
|
||||||
|
* iterates over the results.
|
||||||
|
*/
|
||||||
|
iterateSync() {
|
||||||
|
return this.streamSync()[Symbol.iterator]();
|
||||||
|
}
|
||||||
|
[Symbol.iterator]() {
|
||||||
|
return this.iterateSync();
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Default async iteration function. Returns an AsyncGenerator that
|
||||||
|
* iterates over the results.
|
||||||
|
*/
|
||||||
|
iterate() {
|
||||||
|
return this.stream()[Symbol.asyncIterator]();
|
||||||
|
}
|
||||||
|
[Symbol.asyncIterator]() {
|
||||||
|
return this.iterate();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
//# sourceMappingURL=glob.js.map
|
||||||
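
A sketch of the constructor behaviour described above, reusing one Glob instance as the options for another so the resolved settings and the PathScurry cache are shared. It assumes glob v9 in an ESM module; the patterns are illustrative:

import { Glob } from 'glob'

async function main() {
  // First walk resolves options and builds the PathScurry cache.
  const mdGlob = new Glob('**/*.md', { nodir: true })
  const docs = await mdGlob.walk()

  // Passing the previous Glob as the options object re-uses its settings
  // and cache for a new pattern; Glob instances are also sync-iterable.
  const jsonGlob = new Glob('**/*.json', mdGlob)
  for (const match of jsonGlob) {
    console.log(match)
  }
  console.log(`${docs.length} markdown files`)
}

main()
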
1
node_modules/glob/dist/mjs/glob.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
14
node_modules/glob/dist/mjs/has-magic.d.ts
generated
vendored
Normal file
@@ -0,0 +1,14 @@
import { GlobOptions } from './glob.js';
/**
 * Return true if the patterns provided contain any magic glob characters,
 * given the options provided.
 *
 * Brace expansion is not considered "magic" unless the `magicalBraces` option
 * is set, as brace expansion just turns one string into an array of strings.
 * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
 * `'xby'` both do not contain any magic glob characters, and it's treated the
 * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
 * is in the options, brace expansion _is_ treated as a pattern having magic.
 */
export declare const hasMagic: (pattern: string | string[], options?: GlobOptions) => boolean;
//# sourceMappingURL=has-magic.d.ts.map
1
node_modules/glob/dist/mjs/has-magic.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"has-magic.d.ts","sourceRoot":"","sources":["../../src/has-magic.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,MAAM,WAAW,CAAA;AAEvC;;;;;;;;;;GAUG;AACH,eAAO,MAAM,QAAQ,YACV,MAAM,GAAG,MAAM,EAAE,YACjB,WAAW,KACnB,OAQF,CAAA"}
23
node_modules/glob/dist/mjs/has-magic.js
generated
vendored
Normal file
@@ -0,0 +1,23 @@
import { Minimatch } from 'minimatch';
/**
 * Return true if the patterns provided contain any magic glob characters,
 * given the options provided.
 *
 * Brace expansion is not considered "magic" unless the `magicalBraces` option
 * is set, as brace expansion just turns one string into an array of strings.
 * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
 * `'xby'` both do not contain any magic glob characters, and it's treated the
 * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
 * is in the options, brace expansion _is_ treated as a pattern having magic.
 */
export const hasMagic = (pattern, options = {}) => {
    if (!Array.isArray(pattern)) {
        pattern = [pattern];
    }
    for (const p of pattern) {
        if (new Minimatch(p, options).hasMagic())
            return true;
    }
    return false;
};
//# sourceMappingURL=has-magic.js.map
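
A usage sketch of the hasMagic() helper shown above, mirroring the brace-expansion behaviour its doc comment describes. It assumes the package is imported as 'glob'; the patterns are illustrative:

import { hasMagic } from 'glob'

console.log(hasMagic('src/index.ts'))                      // false: a literal path
console.log(hasMagic('src/**/*.ts'))                       // true: '**' and '*' are magic
console.log(hasMagic('x{a,b}y'))                           // false: braces only expand strings
console.log(hasMagic('x{a,b}y', { magicalBraces: true }))  // true: braces now count as magic
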
1
node_modules/glob/dist/mjs/has-magic.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"has-magic.js","sourceRoot":"","sources":["../../src/has-magic.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,WAAW,CAAA;AAGrC;;;;;;;;;;GAUG;AACH,MAAM,CAAC,MAAM,QAAQ,GAAG,CACtB,OAA0B,EAC1B,UAAuB,EAAE,EAChB,EAAE;IACX,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;QAC3B,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;KACpB;IACD,KAAK,MAAM,CAAC,IAAI,OAAO,EAAE;QACvB,IAAI,IAAI,SAAS,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,QAAQ,EAAE;YAAE,OAAO,IAAI,CAAA;KACtD;IACD,OAAO,KAAK,CAAA;AACd,CAAC,CAAA"}
20
node_modules/glob/dist/mjs/ignore.d.ts
generated
vendored
Normal file
@@ -0,0 +1,20 @@
import { Minimatch } from 'minimatch';
import { Path } from 'path-scurry';
import { GlobWalkerOpts } from './walker.js';
export interface IgnoreLike {
    ignored?: (p: Path) => boolean;
    childrenIgnored?: (p: Path) => boolean;
}
/**
 * Class used to process ignored patterns
 */
export declare class Ignore implements IgnoreLike {
    relative: Minimatch[];
    relativeChildren: Minimatch[];
    absolute: Minimatch[];
    absoluteChildren: Minimatch[];
    constructor(ignored: string[], { nobrace, nocase, noext, noglobstar, platform, }: GlobWalkerOpts);
    ignored(p: Path): boolean;
    childrenIgnored(p: Path): boolean;
}
//# sourceMappingURL=ignore.d.ts.map
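
Besides string patterns, the ignore option accepts any object matching the IgnoreLike interface above. A sketch with a hypothetical filter, assuming glob v9 in an ESM module; the predicates are illustrative, not from this repository:

import { glob } from 'glob'
import type { IgnoreLike } from 'glob'
import type { Path } from 'path-scurry'

const skipMinified: IgnoreLike = {
  // Drop individual matches whose basename ends in '.min.js'.
  ignored: (p: Path) => p.name.endsWith('.min.js'),
  // Never descend into directories named 'fixtures'.
  childrenIgnored: (p: Path) => p.name === 'fixtures',
}

async function main() {
  const files = await glob('**/*.js', { ignore: skipMinified, nodir: true })
  console.log(files)
}

main()
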
1
node_modules/glob/dist/mjs/ignore.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"ignore.d.ts","sourceRoot":"","sources":["../../src/ignore.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,SAAS,EAAE,MAAM,WAAW,CAAA;AACrC,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAElC,OAAO,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;AAE5C,MAAM,WAAW,UAAU;IACzB,OAAO,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,KAAK,OAAO,CAAA;IAC9B,eAAe,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,KAAK,OAAO,CAAA;CACvC;AASD;;GAEG;AACH,qBAAa,MAAO,YAAW,UAAU;IACvC,QAAQ,EAAE,SAAS,EAAE,CAAA;IACrB,gBAAgB,EAAE,SAAS,EAAE,CAAA;IAC7B,QAAQ,EAAE,SAAS,EAAE,CAAA;IACrB,gBAAgB,EAAE,SAAS,EAAE,CAAA;gBAG3B,OAAO,EAAE,MAAM,EAAE,EACjB,EACE,OAAO,EACP,MAAM,EACN,KAAK,EACL,UAAU,EACV,QAA0B,GAC3B,EAAE,cAAc;IAiDnB,OAAO,CAAC,CAAC,EAAE,IAAI,GAAG,OAAO;IAczB,eAAe,CAAC,CAAC,EAAE,IAAI,GAAG,OAAO;CAWlC"}
99
node_modules/glob/dist/mjs/ignore.js
generated
vendored
Normal file
@@ -0,0 +1,99 @@
// give it a pattern, and it'll be able to tell you if
// a given path should be ignored.
// Ignoring a path ignores its children if the pattern ends in /**
// Ignores are always parsed in dot:true mode
import { Minimatch } from 'minimatch';
import { Pattern } from './pattern.js';
const defaultPlatform = typeof process === 'object' &&
    process &&
    typeof process.platform === 'string'
    ? process.platform
    : 'linux';
/**
 * Class used to process ignored patterns
 */
export class Ignore {
    relative;
    relativeChildren;
    absolute;
    absoluteChildren;
    constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) {
        this.relative = [];
        this.absolute = [];
        this.relativeChildren = [];
        this.absoluteChildren = [];
        const mmopts = {
            dot: true,
            nobrace,
            nocase,
            noext,
            noglobstar,
            optimizationLevel: 2,
            platform,
            nocomment: true,
            nonegate: true,
        };
        // this is a little weird, but it gives us a clean set of optimized
        // minimatch matchers, without getting tripped up if one of them
        // ends in /** inside a brace section, and it's only inefficient at
        // the start of the walk, not along it.
        // It'd be nice if the Pattern class just had a .test() method, but
        // handling globstars is a bit of a pita, and that code already lives
        // in minimatch anyway.
        // Another way would be if maybe Minimatch could take its set/globParts
        // as an option, and then we could at least just use Pattern to test
        // for absolute-ness.
        // Yet another way, Minimatch could take an array of glob strings, and
        // a cwd option, and do the right thing.
        for (const ign of ignored) {
            const mm = new Minimatch(ign, mmopts);
            for (let i = 0; i < mm.set.length; i++) {
                const parsed = mm.set[i];
                const globParts = mm.globParts[i];
                const p = new Pattern(parsed, globParts, 0, platform);
                const m = new Minimatch(p.globString(), mmopts);
                const children = globParts[globParts.length - 1] === '**';
                const absolute = p.isAbsolute();
                if (absolute)
                    this.absolute.push(m);
                else
                    this.relative.push(m);
                if (children) {
                    if (absolute)
                        this.absoluteChildren.push(m);
                    else
                        this.relativeChildren.push(m);
                }
            }
        }
    }
    ignored(p) {
        const fullpath = p.fullpath();
        const fullpaths = `${fullpath}/`;
        const relative = p.relative() || '.';
        const relatives = `${relative}/`;
        for (const m of this.relative) {
            if (m.match(relative) || m.match(relatives))
                return true;
        }
        for (const m of this.absolute) {
            if (m.match(fullpath) || m.match(fullpaths))
                return true;
        }
        return false;
    }
    childrenIgnored(p) {
        const fullpath = p.fullpath() + '/';
        const relative = (p.relative() || '.') + '/';
        for (const m of this.relativeChildren) {
            if (m.match(relative))
                return true;
        }
        for (const m of this.absoluteChildren) {
            if (m.match(fullpath))
                // `return` added: the vendored file had a bare `true;` expression
                // here, so absolute children-ignore patterns never took effect.
                return true;
        }
        return false;
    }
}
//# sourceMappingURL=ignore.js.map
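
A sketch of how string ignore patterns flow through the Ignore class above: a pattern ending in '/**' takes the children branch in the constructor, so the directory entry and everything beneath it are skipped, and ignores are always compiled in dot:true mode. It assumes glob v9 imported as 'glob'; the patterns are illustrative:

import { globSync } from 'glob'

const files = globSync('**/*', {
  // 'dist/**' populates both the relative and relativeChildren matcher
  // lists, so 'dist' and its subtree are pruned from the walk, while
  // '**/*.log' only filters individual matches.
  ignore: ['dist/**', '**/*.log'],
  nodir: true,
})
console.log(files.length)
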
1
node_modules/glob/dist/mjs/ignore.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"ignore.js","sourceRoot":"","sources":["../../src/ignore.ts"],"names":[],"mappings":"AAAA,sDAAsD;AACtD,kCAAkC;AAClC,kEAAkE;AAClE,6CAA6C;AAE7C,OAAO,EAAE,SAAS,EAAE,MAAM,WAAW,CAAA;AAErC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAQtC,MAAM,eAAe,GACnB,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ;IAClC,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAEb;;GAEG;AACH,MAAM,OAAO,MAAM;IACjB,QAAQ,CAAa;IACrB,gBAAgB,CAAa;IAC7B,QAAQ,CAAa;IACrB,gBAAgB,CAAa;IAE7B,YACE,OAAiB,EACjB,EACE,OAAO,EACP,MAAM,EACN,KAAK,EACL,UAAU,EACV,QAAQ,GAAG,eAAe,GACX;QAEjB,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAA;QAClB,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAA;QAClB,IAAI,CAAC,gBAAgB,GAAG,EAAE,CAAA;QAC1B,IAAI,CAAC,gBAAgB,GAAG,EAAE,CAAA;QAC1B,MAAM,MAAM,GAAG;YACb,GAAG,EAAE,IAAI;YACT,OAAO;YACP,MAAM;YACN,KAAK;YACL,UAAU;YACV,iBAAiB,EAAE,CAAC;YACpB,QAAQ;YACR,SAAS,EAAE,IAAI;YACf,QAAQ,EAAE,IAAI;SACf,CAAA;QAED,mEAAmE;QACnE,gEAAgE;QAChE,mEAAmE;QACnE,uCAAuC;QACvC,mEAAmE;QACnE,qEAAqE;QACrE,uBAAuB;QACvB,uEAAuE;QACvE,oEAAoE;QACpE,qBAAqB;QACrB,sEAAsE;QACtE,wCAAwC;QACxC,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE;YACzB,MAAM,EAAE,GAAG,IAAI,SAAS,CAAC,GAAG,EAAE,MAAM,CAAC,CAAA;YACrC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBACtC,MAAM,MAAM,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;gBACxB,MAAM,SAAS,GAAG,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;gBACjC,MAAM,CAAC,GAAG,IAAI,OAAO,CAAC,MAAM,EAAE,SAAS,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAA;gBACrD,MAAM,CAAC,GAAG,IAAI,SAAS,CAAC,CAAC,CAAC,UAAU,EAAE,EAAE,MAAM,CAAC,CAAA;gBAC/C,MAAM,QAAQ,GAAG,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,CAAA;gBACzD,MAAM,QAAQ,GAAG,CAAC,CAAC,UAAU,EAAE,CAAA;gBAC/B,IAAI,QAAQ;oBAAE,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;;oBAC9B,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;gBAC1B,IAAI,QAAQ,EAAE;oBACZ,IAAI,QAAQ;wBAAE,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;;wBACtC,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;iBACnC;aACF;SACF;IACH,CAAC;IAED,OAAO,CAAC,CAAO;QACb,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAA;QAC7B,MAAM,SAAS,GAAG,GAAG,QAAQ,GAAG,CAAA;QAChC,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAA;QACpC,MAAM,SAAS,GAAG,GAAG,QAAQ,GAAG,CAAA;QAChC,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE;YAC7B,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC;gBAAE,OAAO,IAAI,CAAA;SACzD;QACD,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE;YAC7B,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC;gBAAE,OAAO,IAAI,CAAA;SACzD;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAED,eAAe,CAAC,CAAO;QACrB,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,GAAG,GAAG,CAAA;QACnC,MAAM,QAAQ,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAC,GAAG,GAAG,CAAA;QAC5C,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,gBAAgB,EAAE;YACrC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;gBAAE,OAAO,IAAI,CAAA;SACnC;QACD,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,gBAAgB,EAAE;YACrC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;gBAAE,IAAI,CAAA;SAC5B;QACD,OAAO,KAAK,CAAA;IACd,CAAC;CACF"}
72
node_modules/glob/dist/mjs/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
import Minipass from 'minipass';
|
||||||
|
import { Path } from 'path-scurry';
|
||||||
|
import type { GlobOptions, GlobOptionsWithFileTypesFalse, GlobOptionsWithFileTypesTrue, GlobOptionsWithFileTypesUnset } from './glob.js';
|
||||||
|
import { Glob } from './glob.js';
|
||||||
|
/**
|
||||||
|
* Syncronous form of {@link globStream}. Will read all the matches as fast as
|
||||||
|
* you consume them, even all in a single tick if you consume them immediately,
|
||||||
|
 * but will still respond to backpressure if they're not consumed immediately.
 */
export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Minipass<Path, Path>;
export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Minipass<string, string>;
export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesUnset): Minipass<string, string>;
export declare function globStreamSync(pattern: string | string[], options: GlobOptions): Minipass<Path, Path> | Minipass<string, string>;
/**
 * Return a stream that emits all the strings or `Path` objects and
 * then emits `end` when completed.
 */
export declare function globStream(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Minipass<string, string>;
export declare function globStream(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Minipass<Path, Path>;
export declare function globStream(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Minipass<string, string>;
export declare function globStream(pattern: string | string[], options: GlobOptions): Minipass<Path, Path> | Minipass<string, string>;
/**
 * Synchronous form of {@link glob}
 */
export declare function globSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): string[];
export declare function globSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Path[];
export declare function globSync(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): string[];
export declare function globSync(pattern: string | string[], options: GlobOptions): Path[] | string[];
/**
 * Perform an asynchronous glob search for the pattern(s) specified. Returns
 * [Path](https://isaacs.github.io/path-scurry/classes/PathBase) objects if the
 * {@link withFileTypes} option is set to `true`. See {@link GlobOptions} for
 * full option descriptions.
 */
export declare function glob(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Promise<string[]>;
export declare function glob(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Promise<Path[]>;
export declare function glob(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Promise<string[]>;
export declare function glob(pattern: string | string[], options: GlobOptions): Promise<Path[] | string[]>;
/**
 * Return an async iterator for walking glob pattern matches.
 */
export declare function globIterate(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): AsyncGenerator<string, void, void>;
export declare function globIterate(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): AsyncGenerator<Path, void, void>;
export declare function globIterate(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): AsyncGenerator<string, void, void>;
export declare function globIterate(pattern: string | string[], options: GlobOptions): AsyncGenerator<Path, void, void> | AsyncGenerator<string, void, void>;
/**
 * Return a sync iterator for walking glob pattern matches.
 */
export declare function globIterateSync(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Generator<string, void, void>;
export declare function globIterateSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Generator<Path, void, void>;
export declare function globIterateSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Generator<string, void, void>;
export declare function globIterateSync(pattern: string | string[], options: GlobOptions): Generator<Path, void, void> | Generator<string, void, void>;
export { escape, unescape } from 'minimatch';
export { Glob } from './glob.js';
export type { GlobOptions, GlobOptionsWithFileTypesFalse, GlobOptionsWithFileTypesTrue, GlobOptionsWithFileTypesUnset, } from './glob.js';
export { hasMagic } from './has-magic.js';
export type { IgnoreLike } from './ignore.js';
export type { MatchStream } from './walker.js';
declare const _default: typeof glob & {
    glob: typeof glob;
    globSync: typeof globSync;
    globStream: typeof globStream;
    globStreamSync: typeof globStreamSync;
    globIterate: typeof globIterate;
    globIterateSync: typeof globIterateSync;
    Glob: typeof Glob;
    hasMagic: (pattern: string | string[], options?: GlobOptions) => boolean;
    escape: (s: string, { windowsPathsNoEscape, }?: Pick<import("minimatch").MinimatchOptions, "windowsPathsNoEscape"> | undefined) => string;
    unescape: (s: string, { windowsPathsNoEscape, }?: Pick<import("minimatch").MinimatchOptions, "windowsPathsNoEscape"> | undefined) => string;
};
export default _default;
//# sourceMappingURL=index.d.ts.map
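The overloads above select the return type from the options object: when the options satisfy GlobOptionsWithFileTypesTrue the results are path-scurry Path objects, otherwise they are plain strings. A minimal consumer-side sketch in TypeScript, assuming the published package entry re-exports this module, that `withFileTypes: true` is the option selecting the Path-returning overloads, and that `fullpath()` is available on path-scurry's PathBase:

import { glob, globSync, globStream, globIterate } from 'glob'

async function demo() {
    // No options: the GlobOptionsWithFileTypesUnset overload applies => Promise<string[]>
    const names = await glob('src/**/*.ts')

    // withFileTypes: true selects the GlobOptionsWithFileTypesTrue overload => Promise<Path[]>
    const entries = await glob('src/**/*.ts', { withFileTypes: true })
    console.log(names.length, entries.map(e => e.fullpath()))

    // Synchronous form => string[]
    const docs = globSync('*.md')
    console.log(docs)

    // Stream and async-iterator forms of the same search
    globStream('**/*.js').on('data', (p: string) => console.log('stream:', p))
    for await (const p of globIterate('**/*.js')) {
        console.log('iterate:', p)
    }
}

demo()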
1
node_modules/glob/dist/mjs/index.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AACA,OAAO,QAAQ,MAAM,UAAU,CAAA;AAC/B,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,KAAK,EACV,WAAW,EACX,6BAA6B,EAC7B,4BAA4B,EAC5B,6BAA6B,EAC9B,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAGhC;;;;GAIG;AACH,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;AACvB,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAQlD;;;GAGG;AACH,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;AACvB,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAQlD;;GAEG;AACH,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,MAAM,EAAE,CAAA;AACX,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,IAAI,EAAE,CAAA;AACT,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,MAAM,EAAE,CAAA;AACX,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,IAAI,EAAE,GAAG,MAAM,EAAE,CAAA;AAQpB;;;;;GAKG;AACH,wBAAsB,IAAI,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;AACpB,wBAAsB,IAAI,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,OAAO,CAAC,IAAI,EAAE,CAAC,CAAA;AAClB,wBAAsB,IAAI,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;AACpB,wBAAsB,IAAI,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,OAAO,CAAC,IAAI,EAAE,GAAG,MAAM,EAAE,CAAC,CAAA;AAQ7B;;GAEG;AACH,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACrC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACnC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACrC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,GAAG,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAQxE;;GAEG;AACH,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAChC,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAC9B,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAChC,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAS9D,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AAC5C,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAChC,YAAY,EACV,WAAW,EACX,6
BAA6B,EAC7B,4BAA4B,EAC5B,6BAA6B,GAC9B,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AACzC,YAAY,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,YAAY,EAAE,WAAW,EAAE,MAAM,aAAa,CAAA;;;;;;;;;;;;;AAG9C,wBAWE"}
39
node_modules/glob/dist/mjs/index.js
generated
vendored
Normal file
@@ -0,0 +1,39 @@
import { escape, unescape } from 'minimatch';
import { Glob } from './glob.js';
import { hasMagic } from './has-magic.js';
export function globStreamSync(pattern, options = {}) {
    return new Glob(pattern, options).streamSync();
}
export function globStream(pattern, options = {}) {
    return new Glob(pattern, options).stream();
}
export function globSync(pattern, options = {}) {
    return new Glob(pattern, options).walkSync();
}
export async function glob(pattern, options = {}) {
    return new Glob(pattern, options).walk();
}
export function globIterate(pattern, options = {}) {
    return new Glob(pattern, options).iterate();
}
export function globIterateSync(pattern, options = {}) {
    return new Glob(pattern, options).iterateSync();
}
/* c8 ignore start */
export { escape, unescape } from 'minimatch';
export { Glob } from './glob.js';
export { hasMagic } from './has-magic.js';
/* c8 ignore stop */
export default Object.assign(glob, {
    glob,
    globSync,
    globStream,
    globStreamSync,
    globIterate,
    globIterateSync,
    Glob,
    hasMagic,
    escape,
    unescape,
});
//# sourceMappingURL=index.js.map
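Each helper is a thin wrapper that constructs a Glob walker and picks a traversal method, and the default export aggregates all of them onto the async glob function itself. A short sketch of what that means for a consumer, assuming the package's top-level entry re-exports this dist/mjs module:

import glob from 'glob'

async function demo() {
    // The default export is the async glob function itself...
    const viaDefault = await glob('**/*.json')

    // ...with every other entry point attached as a property.
    const viaProperty = glob.globSync('**/*.json')
    console.log(viaDefault.length === viaProperty.length)

    const stream = glob.globStream('**/*.json')
    stream.on('data', (p: string) => console.log('match:', p))
}

demo()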
1
node_modules/glob/dist/mjs/index.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AAS5C,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAChC,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AAuBzC,MAAM,UAAU,cAAc,CAC5B,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,UAAU,EAAE,CAAA;AAChD,CAAC;AAsBD,MAAM,UAAU,UAAU,CACxB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;AAC5C,CAAC;AAqBD,MAAM,UAAU,QAAQ,CACtB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,QAAQ,EAAE,CAAA;AAC9C,CAAC;AAwBD,MAAM,CAAC,KAAK,UAAU,IAAI,CACxB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAA;AAC1C,CAAC;AAqBD,MAAM,UAAU,WAAW,CACzB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,OAAO,EAAE,CAAA;AAC7C,CAAC;AAqBD,MAAM,UAAU,eAAe,CAC7B,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,WAAW,EAAE,CAAA;AACjD,CAAC;AAED,qBAAqB;AACrB,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AAC5C,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAOhC,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AAGzC,oBAAoB;AAEpB,eAAe,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE;IACjC,IAAI;IACJ,QAAQ;IACR,UAAU;IACV,cAAc;IACd,WAAW;IACX,eAAe;IACf,IAAI;IACJ,QAAQ;IACR,MAAM;IACN,QAAQ;CACT,CAAC,CAAA"}
3
node_modules/glob/dist/mjs/package.json
generated
vendored
Normal file
@@ -0,0 +1,3 @@
{
    "type": "module"
}
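This nested package.json flags everything under dist/mjs as ES modules, so Node.js parses the .js files in this directory as ESM regardless of the top-level package settings; a CommonJS build presumably lives in a sibling dist/cjs directory with its own marker (an assumption based on the directory name, not shown in this diff). Consumers do not import these paths directly; resolution through the package entry picks the matching build:

// ESM consumer: `import` resolves to the dist/mjs build shown in this diff
import { glob } from 'glob'

// CommonJS consumer: `require('glob')` would resolve to the CJS build instead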
77
node_modules/glob/dist/mjs/pattern.d.ts
generated
vendored
Normal file
@@ -0,0 +1,77 @@
/// <reference types="node" />
import { GLOBSTAR } from 'minimatch';
export type MMPattern = string | RegExp | typeof GLOBSTAR;
export type PatternList = [p: MMPattern, ...rest: MMPattern[]];
export type UNCPatternList = [
    p0: '',
    p1: '',
    p2: string,
    p3: string,
    ...rest: MMPattern[]
];
export type DrivePatternList = [p0: string, ...rest: MMPattern[]];
export type AbsolutePatternList = [p0: '', ...rest: MMPattern[]];
export type GlobList = [p: string, ...rest: string[]];
/**
 * An immutable-ish view on an array of glob parts and their parsed
 * results
 */
export declare class Pattern {
    #private;
    readonly length: number;
    constructor(patternList: MMPattern[], globList: string[], index: number, platform: NodeJS.Platform);
    /**
     * The first entry in the parsed list of patterns
     */
    pattern(): MMPattern;
    /**
     * true if pattern() returns a string
     */
    isString(): boolean;
    /**
     * true if pattern() returns GLOBSTAR
     */
    isGlobstar(): boolean;
    /**
     * true if pattern() returns a regexp
     */
    isRegExp(): boolean;
    /**
     * The /-joined set of glob parts that make up this pattern
     */
    globString(): string;
    /**
     * true if there are more pattern parts after this one
     */
    hasMore(): boolean;
    /**
     * The rest of the pattern after this part, or null if this is the end
     */
    rest(): Pattern | null;
    /**
     * true if the pattern represents a //unc/path/ on windows
     */
    isUNC(): boolean;
    /**
     * True if the pattern starts with a drive letter on Windows
     */
    isDrive(): boolean;
    /**
     * True if the pattern is rooted on an absolute path
     */
    isAbsolute(): boolean;
    /**
     * consume the root of the pattern, and return it
     */
    root(): string;
    /**
     * Check to see if the current globstar pattern is allowed to follow
     * a symbolic link.
     */
    checkFollowGlobstar(): boolean;
    /**
     * Mark that the current globstar pattern is following a symbolic link
     */
    markFollowGlobstar(): boolean;
}
//# sourceMappingURL=pattern.d.ts.map
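A Pattern behaves like a cursor over a parsed glob: pattern() returns the part at the current position, and rest() returns a new Pattern advanced by one part (or null at the end). A minimal sketch of walking one, in TypeScript, using only the methods declared above; constructing a Pattern directly requires minimatch's parsed output, so it is taken as a parameter here, and the deep import path is an assumption that may not be exposed by the package's "exports" map:

import type { MMPattern, Pattern } from 'glob/dist/mjs/pattern.js'

// Describe each part of a parsed pattern: literal string, regexp, or globstar.
function describeParts(pattern: Pattern): string[] {
    const out: string[] = []
    let p: Pattern | null = pattern
    while (p) {
        const part: MMPattern = p.pattern()
        if (p.isGlobstar()) {
            out.push('globstar (**)')
        } else if (p.isRegExp()) {
            out.push(`regexp: ${String(part)}`)
        } else {
            out.push(`literal: ${String(part)}`)
        }
        p = p.rest() // null once hasMore() is false
    }
    return out
}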
1
node_modules/glob/dist/mjs/pattern.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"pattern.d.ts","sourceRoot":"","sources":["../../src/pattern.ts"],"names":[],"mappings":";AAEA,OAAO,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AACpC,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,QAAQ,CAAA;AAGzD,MAAM,MAAM,WAAW,GAAG,CAAC,CAAC,EAAE,SAAS,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AAC9D,MAAM,MAAM,cAAc,GAAG;IAC3B,EAAE,EAAE,EAAE;IACN,EAAE,EAAE,EAAE;IACN,EAAE,EAAE,MAAM;IACV,EAAE,EAAE,MAAM;IACV,GAAG,IAAI,EAAE,SAAS,EAAE;CACrB,CAAA;AACD,MAAM,MAAM,gBAAgB,GAAG,CAAC,EAAE,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AACjE,MAAM,MAAM,mBAAmB,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AAChE,MAAM,MAAM,QAAQ,GAAG,CAAC,CAAC,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,MAAM,EAAE,CAAC,CAAA;AAMrD;;;GAGG;AACH,qBAAa,OAAO;;IAIlB,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAA;gBAUrB,WAAW,EAAE,SAAS,EAAE,EACxB,QAAQ,EAAE,MAAM,EAAE,EAClB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,CAAC,QAAQ;IA6D3B;;OAEG;IACH,OAAO,IAAI,SAAS;IAIpB;;OAEG;IACH,QAAQ,IAAI,OAAO;IAGnB;;OAEG;IACH,UAAU,IAAI,OAAO;IAGrB;;OAEG;IACH,QAAQ,IAAI,OAAO;IAInB;;OAEG;IACH,UAAU,IAAI,MAAM;IAUpB;;OAEG;IACH,OAAO,IAAI,OAAO;IAIlB;;OAEG;IACH,IAAI,IAAI,OAAO,GAAG,IAAI;IAetB;;OAEG;IACH,KAAK,IAAI,OAAO;IAoBhB;;OAEG;IACH,OAAO,IAAI,OAAO;IAelB;;OAEG;IACH,UAAU,IAAI,OAAO;IAUrB;;OAEG;IACH,IAAI,IAAI,MAAM;IAOd;;;OAGG;IACH,mBAAmB,IAAI,OAAO;IAQ9B;;OAEG;IACH,kBAAkB,IAAI,OAAO;CAM9B"}
215
node_modules/glob/dist/mjs/pattern.js
generated
vendored
Normal file
@@ -0,0 +1,215 @@
// this is just a very light wrapper around 2 arrays with an offset index
import { GLOBSTAR } from 'minimatch';
const isPatternList = (pl) => pl.length >= 1;
const isGlobList = (gl) => gl.length >= 1;
/**
 * An immutable-ish view on an array of glob parts and their parsed
 * results
 */
export class Pattern {
    #patternList;
    #globList;
    #index;
    length;
    #platform;
    #rest;
    #globString;
    #isDrive;
    #isUNC;
    #isAbsolute;
    #followGlobstar = true;
    constructor(patternList, globList, index, platform) {
        if (!isPatternList(patternList)) {
            throw new TypeError('empty pattern list');
        }
        if (!isGlobList(globList)) {
            throw new TypeError('empty glob list');
        }
        if (globList.length !== patternList.length) {
            throw new TypeError('mismatched pattern list and glob list lengths');
        }
        this.length = patternList.length;
        if (index < 0 || index >= this.length) {
            throw new TypeError('index out of range');
        }
        this.#patternList = patternList;
        this.#globList = globList;
        this.#index = index;
        this.#platform = platform;
        // normalize root entries of absolute patterns on initial creation.
        if (this.#index === 0) {
            // c: => ['c:/']
            // C:/ => ['C:/']
            // C:/x => ['C:/', 'x']
            // //host/share => ['//host/share/']
            // //host/share/ => ['//host/share/']
            // //host/share/x => ['//host/share/', 'x']
            // /etc => ['/', 'etc']
            // / => ['/']
            if (this.isUNC()) {
                // '' / '' / 'host' / 'share'
                const [p0, p1, p2, p3, ...prest] = this.#patternList;
                const [g0, g1, g2, g3, ...grest] = this.#globList;
                if (prest[0] === '') {
                    // ends in /
                    prest.shift();
                    grest.shift();
                }
                const p = [p0, p1, p2, p3, ''].join('/');
                const g = [g0, g1, g2, g3, ''].join('/');
                this.#patternList = [p, ...prest];
                this.#globList = [g, ...grest];
                this.length = this.#patternList.length;
            }
            else if (this.isDrive() || this.isAbsolute()) {
                const [p1, ...prest] = this.#patternList;
                const [g1, ...grest] = this.#globList;
                if (prest[0] === '') {
                    // ends in /
                    prest.shift();
                    grest.shift();
                }
                const p = p1 + '/';
                const g = g1 + '/';
                this.#patternList = [p, ...prest];
                this.#globList = [g, ...grest];
                this.length = this.#patternList.length;
            }
        }
    }
    /**
     * The first entry in the parsed list of patterns
     */
    pattern() {
        return this.#patternList[this.#index];
    }
    /**
     * true if pattern() returns a string
     */
    isString() {
        return typeof this.#patternList[this.#index] === 'string';
    }
    /**
     * true if pattern() returns GLOBSTAR
     */
    isGlobstar() {
        return this.#patternList[this.#index] === GLOBSTAR;
    }
    /**
     * true if pattern() returns a regexp
     */
    isRegExp() {
        return this.#patternList[this.#index] instanceof RegExp;
    }
    /**
     * The /-joined set of glob parts that make up this pattern
     */
    globString() {
        return (this.#globString =
            this.#globString ||
                (this.#index === 0
                    ? this.isAbsolute()
                        ? this.#globList[0] + this.#globList.slice(1).join('/')
                        : this.#globList.join('/')
                    : this.#globList.slice(this.#index).join('/')));
    }
    /**
     * true if there are more pattern parts after this one
     */
    hasMore() {
        return this.length > this.#index + 1;
    }
    /**
     * The rest of the pattern after this part, or null if this is the end
     */
    rest() {
        if (this.#rest !== undefined)
            return this.#rest;
        if (!this.hasMore())
            return (this.#rest = null);
        this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform);
        this.#rest.#isAbsolute = this.#isAbsolute;
        this.#rest.#isUNC = this.#isUNC;
        this.#rest.#isDrive = this.#isDrive;
        return this.#rest;
    }
    /**
     * true if the pattern represents a //unc/path/ on windows
     */
    isUNC() {
        const pl = this.#patternList;
        return this.#isUNC !== undefined
            ? this.#isUNC
            : (this.#isUNC =
                this.#platform === 'win32' &&
                    this.#index === 0 &&
                    pl[0] === '' &&
                    pl[1] === '' &&
                    typeof pl[2] === 'string' &&
                    !!pl[2] &&
                    typeof pl[3] === 'string' &&
                    !!pl[3]);
    }
    // pattern like C:/...
    // split = ['C:', ...]
    // XXX: would be nice to handle patterns like `c:*` to test the cwd
    // in c: for *, but I don't know of a way to even figure out what that
    // cwd is without actually chdir'ing into it?
    /**
     * True if the pattern starts with a drive letter on Windows
     */
    isDrive() {
        const pl = this.#patternList;
        return this.#isDrive !== undefined
            ? this.#isDrive
            : (this.#isDrive =
                this.#platform === 'win32' &&
                    this.#index === 0 &&
                    this.length > 1 &&
                    typeof pl[0] === 'string' &&
                    /^[a-z]:$/i.test(pl[0]));
    }
    // pattern = '/' or '/...' or '/x/...'
    // split = ['', ''] or ['', ...] or ['', 'x', ...]
    // Drive and UNC both considered absolute on windows
    /**
     * True if the pattern is rooted on an absolute path
     */
    isAbsolute() {
        const pl = this.#patternList;
        return this.#isAbsolute !== undefined
            ? this.#isAbsolute
            : (this.#isAbsolute =
                (pl[0] === '' && pl.length > 1) ||
                    this.isDrive() ||
                    this.isUNC());
    }
    /**
     * consume the root of the pattern, and return it
     */
    root() {
        const p = this.#patternList[0];
        return typeof p === 'string' && this.isAbsolute() && this.#index === 0
            ? p
            : '';
    }
    /**
     * Check to see if the current globstar pattern is allowed to follow
     * a symbolic link.
     */
    checkFollowGlobstar() {
        return !(this.#index === 0 ||
            !this.isGlobstar() ||
            !this.#followGlobstar);
    }
    /**
     * Mark that the current globstar pattern is following a symbolic link
     */
    markFollowGlobstar() {
        if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)
            return false;
        this.#followGlobstar = false;
        return true;
    }
}
//# sourceMappingURL=pattern.js.map
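The interesting part of this class is the root normalization in the constructor, summarized by the comment table above (c: => ['c:/'], //host/share/x => ['//host/share/', 'x'], /etc => ['/', 'etc']). The following is an illustrative standalone sketch in TypeScript of that normalization applied to '/'-split string parts; it is not the library's code, which works on parsed MMPattern lists and keeps the matching glob strings in sync:

function normalizeRoot(parts: string[], platform: string = 'win32'): string[] {
    const isUNC =
        platform === 'win32' &&
        parts[0] === '' &&
        parts[1] === '' &&
        !!parts[2] &&
        !!parts[3]
    const isDrive =
        platform === 'win32' && parts.length > 1 && /^[a-z]:$/i.test(parts[0])
    const isAbsolute = (parts[0] === '' && parts.length > 1) || isDrive || isUNC
    if (isUNC) {
        // ['', '', 'host', 'share', ...rest] => ['//host/share/', ...rest]
        const rest = parts.slice(4)
        if (rest[0] === '') rest.shift() // pattern ended in '/'
        return [parts.slice(0, 4).concat('').join('/'), ...rest]
    }
    if (isDrive || isAbsolute) {
        // ['c:', ...rest] => ['c:/', ...rest]   and   ['', ...rest] => ['/', ...rest]
        const rest = parts.slice(1)
        if (rest[0] === '') rest.shift()
        return [parts[0] + '/', ...rest]
    }
    return parts
}

// normalizeRoot(['C:', 'x'])                    // => ['C:/', 'x']
// normalizeRoot(['', '', 'host', 'share', 'x']) // => ['//host/share/', 'x']
// normalizeRoot(['', 'etc'])                    // => ['/', 'etc']
// normalizeRoot(['src', '**', '*.ts'])          // => unchanged (relative pattern)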
Some files were not shown because too many files have changed in this diff.