mirror of
https://github.com/github/codeql-action.git
synced 2025-12-14 19:39:10 +08:00
Compare commits
173 Commits
codeql-bun
...
v1.1.9
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6c3ae45f3a | ||
|
|
8371fda073 | ||
|
|
e3983ef751 | ||
|
|
e34f3ea243 | ||
|
|
d9937ad6ad | ||
|
|
f703d1ca07 | ||
|
|
acf17f7547 | ||
|
|
7502d6e991 | ||
|
|
cbce00d08d | ||
|
|
72861144fd | ||
|
|
6dd9baf8be | ||
|
|
ff8b365e79 | ||
|
|
eed184a534 | ||
|
|
c76f0b5b07 | ||
|
|
bf4ba6945d | ||
|
|
d2d14adf3e | ||
|
|
95b49c3e6b | ||
|
|
80771fd2d0 | ||
|
|
2b8fdb3f2e | ||
|
|
074853a9a2 | ||
|
|
ce63ab5d00 | ||
|
|
e87e2d8201 | ||
|
|
23b7196b6b | ||
|
|
e6e327771b | ||
|
|
b9577df761 | ||
|
|
808c29257b | ||
|
|
5b5ed44ab7 | ||
|
|
faf9d4b499 | ||
|
|
8b2f5d7158 | ||
|
|
0ba58d8497 | ||
|
|
3962f1bd85 | ||
|
|
9daf1de73c | ||
|
|
bce749b10f | ||
|
|
fce4a01cd7 | ||
|
|
bac9320f4f | ||
|
|
b3bf557359 | ||
|
|
f6312f1322 | ||
|
|
c5c5bdabb9 | ||
|
|
e7869d541b | ||
|
|
7a12645d7e | ||
|
|
9f20addbf2 | ||
|
|
780f4ee1bf | ||
|
|
baf90d17d2 | ||
|
|
6f174084dd | ||
|
|
b0c570ef83 | ||
|
|
2d80fe85fc | ||
|
|
e2cc7cc006 | ||
|
|
0c80741707 | ||
|
|
c7b049b347 | ||
|
|
792bbfea04 | ||
|
|
f679ec9aa9 | ||
|
|
d9f89b3dfd | ||
|
|
0ab00f44cb | ||
|
|
026ff35db0 | ||
|
|
1ed1437484 | ||
|
|
3ed22c8145 | ||
|
|
739937f14e | ||
|
|
0ecdac49ad | ||
|
|
426a3951ee | ||
|
|
a0b596246a | ||
|
|
5d3e1a701c | ||
|
|
b9bb8dd18d | ||
|
|
1fc1008278 | ||
|
|
11673755ab | ||
|
|
7eac76fcb4 | ||
|
|
3d10ffe493 | ||
|
|
f5e5590fc8 | ||
|
|
380041ed00 | ||
|
|
8165d30832 | ||
|
|
d0ca51f5e9 | ||
|
|
0182a2c78c | ||
|
|
488f78249e | ||
|
|
9cab82f202 | ||
|
|
43d066495c | ||
|
|
f090899ed0 | ||
|
|
8a00ed086d | ||
|
|
935969c6f7 | ||
|
|
e26813cf98 | ||
|
|
2c03704a6c | ||
|
|
dd6b592e3e | ||
|
|
a90d8bf711 | ||
|
|
dc0338e493 | ||
|
|
57096fe795 | ||
|
|
b0ddf36abe | ||
|
|
1ea2f2d7f1 | ||
|
|
9dcc141f12 | ||
|
|
ea751a9fae | ||
|
|
a2949f47b3 | ||
|
|
7871f0d5e1 | ||
|
|
e6f3e049b4 | ||
|
|
e83a1d469e | ||
|
|
894faced79 | ||
|
|
a9095cefc9 | ||
|
|
4d339ae3ec | ||
|
|
381ea36211 | ||
|
|
e769c2dd6e | ||
|
|
4c1021c504 | ||
|
|
bae3a3acab | ||
|
|
9da34a6ec6 | ||
|
|
f83be76fd8 | ||
|
|
b45efc9e42 | ||
|
|
75743c96fc | ||
|
|
bcd5c027de | ||
|
|
9885f86fab | ||
|
|
03a275bc11 | ||
|
|
ee3341a9d8 | ||
|
|
28eead2408 | ||
|
|
a4da970395 | ||
|
|
34a1681e50 | ||
|
|
8833977736 | ||
|
|
bfe9e81020 | ||
|
|
1d58cc1f27 | ||
|
|
d8576e34bf | ||
|
|
f1060fbba0 | ||
|
|
af34c6da92 | ||
|
|
282b607642 | ||
|
|
f0e2f3c053 | ||
|
|
73ba7ffb48 | ||
|
|
2f4dd4bb41 | ||
|
|
8237e85158 | ||
|
|
eea7cf19ff | ||
|
|
fdc2a903c1 | ||
|
|
c22162c09d | ||
|
|
01c72238c1 | ||
|
|
63b2636c23 | ||
|
|
0ed0799824 | ||
|
|
9e403590f4 | ||
|
|
45b96c3de6 | ||
|
|
9a709c116e | ||
|
|
43c9f26143 | ||
|
|
b949e494e4 | ||
|
|
3d23aade46 | ||
|
|
d625a00cee | ||
|
|
077ec096bb | ||
|
|
4d6e9c02ac | ||
|
|
839aa81918 | ||
|
|
6d1f0a0357 | ||
|
|
88db5e75ec | ||
|
|
d068f5372a | ||
|
|
044f112dc1 | ||
|
|
f7846479e6 | ||
|
|
d0bd80897c | ||
|
|
bed132dae4 | ||
|
|
9d26fe0cb3 | ||
|
|
6e57bbac6c | ||
|
|
85cfdb24f4 | ||
|
|
df164705ad | ||
|
|
da7944b165 | ||
|
|
33599909af | ||
|
|
f143182488 | ||
|
|
0b037b4fcb | ||
|
|
1668e0a2bf | ||
|
|
bd4757cd6b | ||
|
|
5fb01dd153 | ||
|
|
124e7d96a6 | ||
|
|
b8f3a377bf | ||
|
|
4b465cb3ce | ||
|
|
d76b18254a | ||
|
|
33f749f1c9 | ||
|
|
ccda44cac5 | ||
|
|
81827d3fc6 | ||
|
|
b386fd4443 | ||
|
|
2a7a517ea5 | ||
|
|
ca5ed24270 | ||
|
|
fb22523acc | ||
|
|
b887a2ce1e | ||
|
|
686c3a37f0 | ||
|
|
c9973ef56b | ||
|
|
c3010cb18a | ||
|
|
5d72058994 | ||
|
|
76c3e91b1f | ||
|
|
5c2600e559 | ||
|
|
a92e8775d8 |
226
.github/update-release-branch.py
vendored
226
.github/update-release-branch.py
vendored
@@ -1,12 +1,9 @@
|
|||||||
|
import argparse
|
||||||
import datetime
|
import datetime
|
||||||
from github import Github
|
from github import Github
|
||||||
import random
|
|
||||||
import requests
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import json
|
import json
|
||||||
import datetime
|
|
||||||
import os
|
import os
|
||||||
|
import subprocess
|
||||||
|
|
||||||
EMPTY_CHANGELOG = """# CodeQL Action and CodeQL Runner Changelog
|
EMPTY_CHANGELOG = """# CodeQL Action and CodeQL Runner Changelog
|
||||||
|
|
||||||
@@ -16,21 +13,25 @@ No user facing changes.
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# The branch being merged from.
|
# Value of the mode flag for a v1 release
|
||||||
# This is the one that contains day-to-day development work.
|
V1_MODE = 'v1-release'
|
||||||
MAIN_BRANCH = 'main'
|
|
||||||
# The branch being merged into.
|
# Value of the mode flag for a v2 release
|
||||||
# This is the release branch that users reference.
|
V2_MODE = 'v2-release'
|
||||||
LATEST_RELEASE_BRANCH = 'v1'
|
|
||||||
|
SOURCE_BRANCH_FOR_MODE = { V1_MODE: 'releases/v2', V2_MODE: 'main' }
|
||||||
|
TARGET_BRANCH_FOR_MODE = { V1_MODE: 'releases/v1', V2_MODE: 'releases/v2' }
|
||||||
|
|
||||||
# Name of the remote
|
# Name of the remote
|
||||||
ORIGIN = 'origin'
|
ORIGIN = 'origin'
|
||||||
|
|
||||||
# Runs git with the given args and returns the stdout.
|
# Runs git with the given args and returns the stdout.
|
||||||
# Raises an error if git does not exit successfully.
|
# Raises an error if git does not exit successfully (unless passed
|
||||||
def run_git(*args):
|
# allow_non_zero_exit_code=True).
|
||||||
|
def run_git(*args, allow_non_zero_exit_code=False):
|
||||||
cmd = ['git', *args]
|
cmd = ['git', *args]
|
||||||
p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
if (p.returncode != 0):
|
if not allow_non_zero_exit_code and p.returncode != 0:
|
||||||
raise Exception('Call to ' + ' '.join(cmd) + ' exited with code ' + str(p.returncode) + ' stderr:' + p.stderr.decode('ascii'))
|
raise Exception('Call to ' + ' '.join(cmd) + ' exited with code ' + str(p.returncode) + ' stderr:' + p.stderr.decode('ascii'))
|
||||||
return p.stdout.decode('ascii')
|
return p.stdout.decode('ascii')
|
||||||
|
|
||||||
@@ -38,8 +39,10 @@ def run_git(*args):
|
|||||||
def branch_exists_on_remote(branch_name):
|
def branch_exists_on_remote(branch_name):
|
||||||
return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''
|
return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''
|
||||||
|
|
||||||
# Opens a PR from the given branch to the release branch
|
# Opens a PR from the given branch to the target branch
|
||||||
def open_pr(repo, all_commits, short_main_sha, branch_name):
|
def open_pr(
|
||||||
|
repo, all_commits, source_branch_short_sha, new_branch_name, source_branch, target_branch,
|
||||||
|
conductor, is_v2_release, labels, conflicted_files):
|
||||||
# Sort the commits into the pull requests that introduced them,
|
# Sort the commits into the pull requests that introduced them,
|
||||||
# and any commits that don't have a pull request
|
# and any commits that don't have a pull request
|
||||||
pull_requests = []
|
pull_requests = []
|
||||||
@@ -61,9 +64,8 @@ def open_pr(repo, all_commits, short_main_sha, branch_name):
|
|||||||
|
|
||||||
# Start constructing the body text
|
# Start constructing the body text
|
||||||
body = []
|
body = []
|
||||||
body.append('Merging ' + short_main_sha + ' into ' + LATEST_RELEASE_BRANCH)
|
body.append('Merging ' + source_branch_short_sha + ' into ' + target_branch)
|
||||||
|
|
||||||
conductor = get_conductor(repo, pull_requests, commits_without_pull_requests)
|
|
||||||
body.append('')
|
body.append('')
|
||||||
body.append('Conductor for this PR is @' + conductor)
|
body.append('Conductor for this PR is @' + conductor)
|
||||||
|
|
||||||
@@ -80,43 +82,46 @@ def open_pr(repo, all_commits, short_main_sha, branch_name):
|
|||||||
body.append('')
|
body.append('')
|
||||||
body.append('Contains the following commits not from a pull request:')
|
body.append('Contains the following commits not from a pull request:')
|
||||||
for commit in commits_without_pull_requests:
|
for commit in commits_without_pull_requests:
|
||||||
body.append('- ' + commit.sha + ' - ' + get_truncated_commit_message(commit) + ' (@' + commit.author.login + ')')
|
author_description = ' (@' + commit.author.login + ')' if commit.author is not None else ''
|
||||||
|
body.append('- ' + commit.sha + ' - ' + get_truncated_commit_message(commit) + author_description)
|
||||||
|
|
||||||
body.append('')
|
body.append('')
|
||||||
body.append('Please review the following:')
|
body.append('Please review the following:')
|
||||||
|
if len(conflicted_files) > 0:
|
||||||
|
body.append(' - [ ] You have added commits to this branch that resolve the merge conflicts ' +
|
||||||
|
'in the following files:')
|
||||||
|
body.extend([f' - [ ] `{file}`' for file in conflicted_files])
|
||||||
|
body.append(' - [ ] Another maintainer has reviewed the additional commits you added to this ' +
|
||||||
|
'branch to resolve the merge conflicts.')
|
||||||
body.append(' - [ ] The CHANGELOG displays the correct version and date.')
|
body.append(' - [ ] The CHANGELOG displays the correct version and date.')
|
||||||
body.append(' - [ ] The CHANGELOG includes all relevant, user-facing changes since the last release.')
|
body.append(' - [ ] The CHANGELOG includes all relevant, user-facing changes since the last release.')
|
||||||
body.append(' - [ ] There are no unexpected commits being merged into the ' + LATEST_RELEASE_BRANCH + ' branch.')
|
body.append(' - [ ] There are no unexpected commits being merged into the ' + target_branch + ' branch.')
|
||||||
body.append(' - [ ] The docs team is aware of any documentation changes that need to be released.')
|
body.append(' - [ ] The docs team is aware of any documentation changes that need to be released.')
|
||||||
body.append(' - [ ] The mergeback PR is merged back into ' + MAIN_BRANCH + ' after this PR is merged.')
|
if is_v2_release:
|
||||||
|
body.append(' - [ ] The mergeback PR is merged back into ' + source_branch + ' after this PR is merged.')
|
||||||
|
body.append(' - [ ] The v1 release PR is merged after this PR is merged.')
|
||||||
|
|
||||||
title = 'Merge ' + MAIN_BRANCH + ' into ' + LATEST_RELEASE_BRANCH
|
title = 'Merge ' + source_branch + ' into ' + target_branch
|
||||||
|
|
||||||
# Create the pull request
|
# Create the pull request
|
||||||
# PR checks won't be triggered on PRs created by Actions. Therefore mark the PR as draft so that
|
# PR checks won't be triggered on PRs created by Actions. Therefore mark the PR as draft so that
|
||||||
# a maintainer can take the PR out of draft, thereby triggering the PR checks.
|
# a maintainer can take the PR out of draft, thereby triggering the PR checks.
|
||||||
pr = repo.create_pull(title=title, body='\n'.join(body), head=branch_name, base=LATEST_RELEASE_BRANCH, draft=True)
|
pr = repo.create_pull(title=title, body='\n'.join(body), head=new_branch_name, base=target_branch, draft=True)
|
||||||
|
pr.add_to_labels(*labels)
|
||||||
print('Created PR #' + str(pr.number))
|
print('Created PR #' + str(pr.number))
|
||||||
|
|
||||||
# Assign the conductor
|
# Assign the conductor
|
||||||
pr.add_to_assignees(conductor)
|
pr.add_to_assignees(conductor)
|
||||||
print('Assigned PR to ' + conductor)
|
print('Assigned PR to ' + conductor)
|
||||||
|
|
||||||
# Gets the person who should be in charge of the mergeback PR
|
# Gets a list of the SHAs of all commits that have happened on the source branch
|
||||||
def get_conductor(repo, pull_requests, other_commits):
|
# since the last release to the target branch.
|
||||||
# If there are any PRs then use whoever merged the last one
|
# This will not include any commits that exist on the target branch
|
||||||
if len(pull_requests) > 0:
|
# that aren't on the source branch.
|
||||||
return get_merger_of_pr(repo, pull_requests[-1])
|
def get_commit_difference(repo, source_branch, target_branch):
|
||||||
|
# Passing split nothing means that the empty string splits to nothing: compare `''.split() == []`
|
||||||
# Otherwise take the author of the latest commit
|
# to `''.split('\n') == ['']`.
|
||||||
return other_commits[-1].author.login
|
commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + target_branch + '..' + ORIGIN + '/' + source_branch).strip().split()
|
||||||
|
|
||||||
# Gets a list of the SHAs of all commits that have happened on main
|
|
||||||
# since the release branched off.
|
|
||||||
# This will not include any commits that exist on the release branch
|
|
||||||
# that aren't on main.
|
|
||||||
def get_commit_difference(repo):
|
|
||||||
commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '..' + ORIGIN + '/' + MAIN_BRANCH).strip().split('\n')
|
|
||||||
|
|
||||||
# Convert to full-fledged commit objects
|
# Convert to full-fledged commit objects
|
||||||
commits = [repo.get_commit(c) for c in commits]
|
commits = [repo.get_commit(c) for c in commits]
|
||||||
@@ -136,7 +141,7 @@ def get_truncated_commit_message(commit):
|
|||||||
else:
|
else:
|
||||||
return message
|
return message
|
||||||
|
|
||||||
# Converts a commit into the PR that introduced it to the main branch.
|
# Converts a commit into the PR that introduced it to the source branch.
|
||||||
# Returns the PR object, or None if no PR could be found.
|
# Returns the PR object, or None if no PR could be found.
|
||||||
def get_pr_for_commit(repo, commit):
|
def get_pr_for_commit(repo, commit):
|
||||||
prs = commit.get_pulls()
|
prs = commit.get_pulls()
|
||||||
@@ -179,29 +184,65 @@ def update_changelog(version):
|
|||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
if len(sys.argv) != 3:
|
parser = argparse.ArgumentParser('update-release-branch.py')
|
||||||
raise Exception('Usage: update-release.branch.py <github token> <repository nwo>')
|
|
||||||
github_token = sys.argv[1]
|
|
||||||
repository_nwo = sys.argv[2]
|
|
||||||
|
|
||||||
repo = Github(github_token).get_repo(repository_nwo)
|
parser.add_argument(
|
||||||
|
'--github-token',
|
||||||
|
type=str,
|
||||||
|
required=True,
|
||||||
|
help='GitHub token, typically from GitHub Actions.'
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'--repository-nwo',
|
||||||
|
type=str,
|
||||||
|
required=True,
|
||||||
|
help='The nwo of the repository, for example github/codeql-action.'
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'--mode',
|
||||||
|
type=str,
|
||||||
|
required=True,
|
||||||
|
choices=[V2_MODE, V1_MODE],
|
||||||
|
help=f"Which release to perform. '{V2_MODE}' uses {SOURCE_BRANCH_FOR_MODE[V2_MODE]} as the source " +
|
||||||
|
f"branch and {TARGET_BRANCH_FOR_MODE[V2_MODE]} as the target branch. " +
|
||||||
|
f"'{V1_MODE}' uses {SOURCE_BRANCH_FOR_MODE[V1_MODE]} as the source branch and " +
|
||||||
|
f"{TARGET_BRANCH_FOR_MODE[V1_MODE]} as the target branch."
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'--conductor',
|
||||||
|
type=str,
|
||||||
|
required=True,
|
||||||
|
help='The GitHub handle of the person who is conducting the release process.'
|
||||||
|
)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
source_branch = SOURCE_BRANCH_FOR_MODE[args.mode]
|
||||||
|
target_branch = TARGET_BRANCH_FOR_MODE[args.mode]
|
||||||
|
|
||||||
|
repo = Github(args.github_token).get_repo(args.repository_nwo)
|
||||||
version = get_current_version()
|
version = get_current_version()
|
||||||
|
|
||||||
|
if args.mode == V1_MODE:
|
||||||
|
# Change the version number to a v1 equivalent
|
||||||
|
version = get_current_version()
|
||||||
|
version = f'1{version[1:]}'
|
||||||
|
|
||||||
# Print what we intend to go
|
# Print what we intend to go
|
||||||
print('Considering difference between ' + MAIN_BRANCH + ' and ' + LATEST_RELEASE_BRANCH)
|
print('Considering difference between ' + source_branch + ' and ' + target_branch)
|
||||||
short_main_sha = run_git('rev-parse', '--short', ORIGIN + '/' + MAIN_BRANCH).strip()
|
source_branch_short_sha = run_git('rev-parse', '--short', ORIGIN + '/' + source_branch).strip()
|
||||||
print('Current head of ' + MAIN_BRANCH + ' is ' + short_main_sha)
|
print('Current head of ' + source_branch + ' is ' + source_branch_short_sha)
|
||||||
|
|
||||||
# See if there are any commits to merge in
|
# See if there are any commits to merge in
|
||||||
commits = get_commit_difference(repo)
|
commits = get_commit_difference(repo=repo, source_branch=source_branch, target_branch=target_branch)
|
||||||
if len(commits) == 0:
|
if len(commits) == 0:
|
||||||
print('No commits to merge from ' + MAIN_BRANCH + ' to ' + LATEST_RELEASE_BRANCH)
|
print('No commits to merge from ' + source_branch + ' to ' + target_branch)
|
||||||
return
|
return
|
||||||
|
|
||||||
# The branch name is based off of the name of branch being merged into
|
# The branch name is based off of the name of branch being merged into
|
||||||
# and the SHA of the branch being merged from. Thus if the branch already
|
# and the SHA of the branch being merged from. Thus if the branch already
|
||||||
# exists we can assume we don't need to recreate it.
|
# exists we can assume we don't need to recreate it.
|
||||||
new_branch_name = 'update-v' + version + '-' + short_main_sha
|
new_branch_name = 'update-v' + version + '-' + source_branch_short_sha
|
||||||
print('Branch name is ' + new_branch_name)
|
print('Branch name is ' + new_branch_name)
|
||||||
|
|
||||||
# Check if the branch already exists. If so we can abort as this script
|
# Check if the branch already exists. If so we can abort as this script
|
||||||
@@ -212,19 +253,90 @@ def main():
|
|||||||
|
|
||||||
# Create the new branch and push it to the remote
|
# Create the new branch and push it to the remote
|
||||||
print('Creating branch ' + new_branch_name)
|
print('Creating branch ' + new_branch_name)
|
||||||
run_git('checkout', '-b', new_branch_name, ORIGIN + '/' + MAIN_BRANCH)
|
|
||||||
|
|
||||||
print('Updating changelog')
|
# The process of creating the v1 release can run into merge conflicts. We commit the unresolved
|
||||||
update_changelog(version)
|
# conflicts so a maintainer can easily resolve them (vs erroring and requiring maintainers to
|
||||||
|
# reconstruct the release manually)
|
||||||
|
conflicted_files = []
|
||||||
|
|
||||||
# Create a commit that updates the CHANGELOG
|
if args.mode == V1_MODE:
|
||||||
run_git('add', 'CHANGELOG.md')
|
# If we're performing a backport, start from the target branch
|
||||||
run_git('commit', '-m', version)
|
print(f'Creating {new_branch_name} from the {ORIGIN}/{target_branch} branch')
|
||||||
|
run_git('checkout', '-b', new_branch_name, f'{ORIGIN}/{target_branch}')
|
||||||
|
|
||||||
|
# Revert the commit that we made as part of the last release that updated the version number and
|
||||||
|
# changelog to refer to 1.x.x variants. This avoids merge conflicts in the changelog and
|
||||||
|
# package.json files when we merge in the v2 branch.
|
||||||
|
# This commit will not exist the first time we release the v1 branch from the v2 branch, so we
|
||||||
|
# use `git log --grep` to conditionally revert the commit.
|
||||||
|
print('Reverting the 1.x.x version number and changelog updates from the last release to avoid conflicts')
|
||||||
|
v1_update_commits = run_git('log', '--grep', '^Update version and changelog for v', '--format=%H').split()
|
||||||
|
|
||||||
|
if len(v1_update_commits) > 0:
|
||||||
|
print(f' Reverting {v1_update_commits[0]}')
|
||||||
|
# Only revert the newest commit as older ones will already have been reverted in previous
|
||||||
|
# releases.
|
||||||
|
run_git('revert', v1_update_commits[0], '--no-edit')
|
||||||
|
|
||||||
|
# Also revert the "Update checked-in dependencies" commit created by Actions.
|
||||||
|
update_dependencies_commit = run_git('log', '--grep', '^Update checked-in dependencies', '--format=%H').split()[0]
|
||||||
|
print(f' Reverting {update_dependencies_commit}')
|
||||||
|
run_git('revert', update_dependencies_commit, '--no-edit')
|
||||||
|
|
||||||
|
else:
|
||||||
|
print(' Nothing to revert.')
|
||||||
|
|
||||||
|
print(f'Merging {ORIGIN}/{source_branch} into the release prep branch')
|
||||||
|
# Commit any conflicts (see the comment for `conflicted_files`)
|
||||||
|
run_git('merge', f'{ORIGIN}/{source_branch}', allow_non_zero_exit_code=True)
|
||||||
|
conflicted_files = run_git('diff', '--name-only', '--diff-filter', 'U').splitlines()
|
||||||
|
if len(conflicted_files) > 0:
|
||||||
|
run_git('add', '.')
|
||||||
|
run_git('commit', '--no-edit')
|
||||||
|
|
||||||
|
# Migrate the package version number from a v2 version number to a v1 version number
|
||||||
|
print(f'Setting version number to {version}')
|
||||||
|
subprocess.run(['npm', 'version', version, '--no-git-tag-version'])
|
||||||
|
run_git('add', 'package.json', 'package-lock.json')
|
||||||
|
|
||||||
|
# Migrate the changelog notes from v2 version numbers to v1 version numbers
|
||||||
|
print('Migrating changelog notes from v2 to v1')
|
||||||
|
subprocess.run(['sed', '-i', 's/^## 2\./## 1./g', 'CHANGELOG.md'])
|
||||||
|
|
||||||
|
# Remove changelog notes from v2 that don't apply to v1
|
||||||
|
subprocess.run(['sed', '-i', '/^- \[v2+ only\]/d', 'CHANGELOG.md'])
|
||||||
|
|
||||||
|
# Amend the commit generated by `npm version` to update the CHANGELOG
|
||||||
|
run_git('add', 'CHANGELOG.md')
|
||||||
|
run_git('commit', '-m', f'Update version and changelog for v{version}')
|
||||||
|
else:
|
||||||
|
# If we're performing a standard release, there won't be any new commits on the target branch,
|
||||||
|
# as these will have already been merged back into the source branch. Therefore we can just
|
||||||
|
# start from the source branch.
|
||||||
|
run_git('checkout', '-b', new_branch_name, f'{ORIGIN}/{source_branch}')
|
||||||
|
|
||||||
|
print('Updating changelog')
|
||||||
|
update_changelog(version)
|
||||||
|
|
||||||
|
# Create a commit that updates the CHANGELOG
|
||||||
|
run_git('add', 'CHANGELOG.md')
|
||||||
|
run_git('commit', '-m', f'Update changelog for v{version}')
|
||||||
|
|
||||||
run_git('push', ORIGIN, new_branch_name)
|
run_git('push', ORIGIN, new_branch_name)
|
||||||
|
|
||||||
# Open a PR to update the branch
|
# Open a PR to update the branch
|
||||||
open_pr(repo, commits, short_main_sha, new_branch_name)
|
open_pr(
|
||||||
|
repo,
|
||||||
|
commits,
|
||||||
|
source_branch_short_sha,
|
||||||
|
new_branch_name,
|
||||||
|
source_branch=source_branch,
|
||||||
|
target_branch=target_branch,
|
||||||
|
conductor=args.conductor,
|
||||||
|
is_v2_release=args.mode == V2_MODE,
|
||||||
|
labels=['Update dependencies'] if args.mode == V1_MODE else [],
|
||||||
|
conflicted_files=conflicted_files
|
||||||
|
)
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|||||||
7
.github/workflows/__analyze-ref-input.yml
generated
vendored
7
.github/workflows/__analyze-ref-input.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -65,11 +66,11 @@ jobs:
|
|||||||
- os: windows-2022
|
- os: windows-2022
|
||||||
version: nightly-latest
|
version: nightly-latest
|
||||||
name: "Analyze: 'ref' and 'sha' from inputs"
|
name: "Analyze: 'ref' and 'sha' from inputs"
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
|
|||||||
9
.github/workflows/__debug-artifacts.yml
generated
vendored
9
.github/workflows/__debug-artifacts.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -49,11 +50,11 @@ jobs:
|
|||||||
- os: macos-latest
|
- os: macos-latest
|
||||||
version: nightly-latest
|
version: nightly-latest
|
||||||
name: Debug artifact upload
|
name: Debug artifact upload
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
@@ -70,7 +71,7 @@ jobs:
|
|||||||
run: ./build.sh
|
run: ./build.sh
|
||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
id: analysis
|
id: analysis
|
||||||
- uses: actions/download-artifact@v2
|
- uses: actions/download-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: my-debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
|
name: my-debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
|
||||||
- shell: bash
|
- shell: bash
|
||||||
|
|||||||
7
.github/workflows/__extractor-ram-threads.yml
generated
vendored
7
.github/workflows/__extractor-ram-threads.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -27,11 +28,11 @@ jobs:
|
|||||||
- os: ubuntu-latest
|
- os: ubuntu-latest
|
||||||
version: latest
|
version: latest
|
||||||
name: Extractor ram and threads options test
|
name: Extractor ram and threads options test
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
|
|||||||
9
.github/workflows/__go-custom-queries.yml
generated
vendored
9
.github/workflows/__go-custom-queries.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -65,17 +66,17 @@ jobs:
|
|||||||
- os: windows-2022
|
- os: windows-2022
|
||||||
version: nightly-latest
|
version: nightly-latest
|
||||||
name: 'Go: Custom queries'
|
name: 'Go: Custom queries'
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
with:
|
with:
|
||||||
version: ${{ matrix.version }}
|
version: ${{ matrix.version }}
|
||||||
- uses: actions/setup-go@v2
|
- uses: actions/setup-go@v3
|
||||||
with:
|
with:
|
||||||
go-version: ^1.13.1
|
go-version: ^1.13.1
|
||||||
- uses: ./../action/init
|
- uses: ./../action/init
|
||||||
|
|||||||
9
.github/workflows/__go-custom-tracing-autobuild.yml
generated
vendored
9
.github/workflows/__go-custom-tracing-autobuild.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -49,17 +50,17 @@ jobs:
|
|||||||
- os: macos-latest
|
- os: macos-latest
|
||||||
version: nightly-latest
|
version: nightly-latest
|
||||||
name: 'Go: Autobuild custom tracing'
|
name: 'Go: Autobuild custom tracing'
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
with:
|
with:
|
||||||
version: ${{ matrix.version }}
|
version: ${{ matrix.version }}
|
||||||
- uses: actions/setup-go@v2
|
- uses: actions/setup-go@v3
|
||||||
with:
|
with:
|
||||||
go-version: ^1.13.1
|
go-version: ^1.13.1
|
||||||
- uses: ./../action/init
|
- uses: ./../action/init
|
||||||
|
|||||||
9
.github/workflows/__go-custom-tracing.yml
generated
vendored
9
.github/workflows/__go-custom-tracing.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -65,17 +66,17 @@ jobs:
|
|||||||
- os: windows-2022
|
- os: windows-2022
|
||||||
version: nightly-latest
|
version: nightly-latest
|
||||||
name: 'Go: Custom tracing'
|
name: 'Go: Custom tracing'
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
with:
|
with:
|
||||||
version: ${{ matrix.version }}
|
version: ${{ matrix.version }}
|
||||||
- uses: actions/setup-go@v2
|
- uses: actions/setup-go@v3
|
||||||
with:
|
with:
|
||||||
go-version: ^1.13.1
|
go-version: ^1.13.1
|
||||||
- uses: ./../action/init
|
- uses: ./../action/init
|
||||||
|
|||||||
7
.github/workflows/__javascript-source-root.yml
generated
vendored
7
.github/workflows/__javascript-source-root.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -31,11 +32,11 @@ jobs:
|
|||||||
- os: ubuntu-latest
|
- os: ubuntu-latest
|
||||||
version: nightly-latest
|
version: nightly-latest
|
||||||
name: Custom source root
|
name: Custom source root
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
|
|||||||
119
.github/workflows/__ml-powered-queries.yml
generated
vendored
Normal file
119
.github/workflows/__ml-powered-queries.yml
generated
vendored
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
# Warning: This file is generated automatically, and should not be modified.
|
||||||
|
# Instead, please modify the template in the pr-checks directory and run:
|
||||||
|
# pip install ruamel.yaml && python3 sync.py
|
||||||
|
# to regenerate this file.
|
||||||
|
|
||||||
|
name: PR Check - ML-powered queries
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
GO111MODULE: auto
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
|
pull_request:
|
||||||
|
types:
|
||||||
|
- opened
|
||||||
|
- synchronize
|
||||||
|
- reopened
|
||||||
|
- ready_for_review
|
||||||
|
workflow_dispatch: {}
|
||||||
|
jobs:
|
||||||
|
ml-powered-queries:
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- os: ubuntu-latest
|
||||||
|
version: stable-20220120
|
||||||
|
- os: macos-latest
|
||||||
|
version: stable-20220120
|
||||||
|
- os: windows-latest
|
||||||
|
version: stable-20220120
|
||||||
|
- os: ubuntu-latest
|
||||||
|
version: cached
|
||||||
|
- os: macos-latest
|
||||||
|
version: cached
|
||||||
|
- os: windows-latest
|
||||||
|
version: cached
|
||||||
|
- os: ubuntu-latest
|
||||||
|
version: latest
|
||||||
|
- os: macos-latest
|
||||||
|
version: latest
|
||||||
|
- os: windows-latest
|
||||||
|
version: latest
|
||||||
|
- os: ubuntu-latest
|
||||||
|
version: nightly-latest
|
||||||
|
- os: macos-latest
|
||||||
|
version: nightly-latest
|
||||||
|
- os: windows-latest
|
||||||
|
version: nightly-latest
|
||||||
|
name: ML-powered queries
|
||||||
|
timeout-minutes: 45
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
steps:
|
||||||
|
- name: Check out repository
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- name: Prepare test
|
||||||
|
id: prepare-test
|
||||||
|
uses: ./.github/prepare-test
|
||||||
|
with:
|
||||||
|
version: ${{ matrix.version }}
|
||||||
|
- uses: ./../action/init
|
||||||
|
with:
|
||||||
|
languages: javascript
|
||||||
|
queries: security-extended
|
||||||
|
source-root: ./../action/tests/ml-powered-queries-repo
|
||||||
|
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||||
|
|
||||||
|
- uses: ./../action/analyze
|
||||||
|
with:
|
||||||
|
output: ${{ runner.temp }}/results
|
||||||
|
upload-database: false
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
- name: Upload SARIF
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: ml-powered-queries-${{ matrix.os }}-${{ matrix.version }}.sarif.json
|
||||||
|
path: ${{ runner.temp }}/results/javascript.sarif
|
||||||
|
retention-days: 7
|
||||||
|
|
||||||
|
- name: Check results
|
||||||
|
env:
|
||||||
|
IS_WINDOWS: ${{ matrix.os == 'windows-latest' }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
cd "$RUNNER_TEMP/results"
|
||||||
|
# We should run at least the ML-powered queries in `expected_rules`.
|
||||||
|
expected_rules="js/ml-powered/nosql-injection js/ml-powered/path-injection js/ml-powered/sql-injection js/ml-powered/xss"
|
||||||
|
|
||||||
|
for rule in ${expected_rules}; do
|
||||||
|
found_rule=$(jq --arg rule "${rule}" '[.runs[0].tool.extensions[].rules | select(. != null) |
|
||||||
|
flatten | .[].id] | any(. == $rule)' javascript.sarif)
|
||||||
|
echo "Did find rule '${rule}': ${found_rule}"
|
||||||
|
if [[ "${found_rule}" != "true" && "${IS_WINDOWS}" != "true" ]]; then
|
||||||
|
echo "Expected SARIF output to contain rule '${rule}', but found no such rule."
|
||||||
|
exit 1
|
||||||
|
elif [[ "${found_rule}" == "true" && "${IS_WINDOWS}" == "true" ]]; then
|
||||||
|
echo "Found rule '${rule}' in the SARIF output which shouldn't have been part of the analysis."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
# We should have at least one alert from an ML-powered query.
|
||||||
|
num_alerts=$(jq '[.runs[0].results[] |
|
||||||
|
select(.properties.score != null and (.rule.id | startswith("js/ml-powered/")))] | length' \
|
||||||
|
javascript.sarif)
|
||||||
|
echo "Found ${num_alerts} alerts from ML-powered queries.";
|
||||||
|
if [[ "${num_alerts}" -eq 0 && "${IS_WINDOWS}" != "true" ]]; then
|
||||||
|
echo "Expected to find at least one alert from an ML-powered query but found ${num_alerts}."
|
||||||
|
exit 1
|
||||||
|
elif [[ "${num_alerts}" -ne 0 && "${IS_WINDOWS}" == "true" ]]; then
|
||||||
|
echo "Expected not to find any alerts from an ML-powered query but found ${num_alerts}."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
env:
|
||||||
|
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||||
7
.github/workflows/__multi-language-autodetect.yml
generated
vendored
7
.github/workflows/__multi-language-autodetect.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -49,11 +50,11 @@ jobs:
|
|||||||
- os: macos-latest
|
- os: macos-latest
|
||||||
version: nightly-latest
|
version: nightly-latest
|
||||||
name: Multi-language repository
|
name: Multi-language repository
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
|
|||||||
7
.github/workflows/__packaging-config-inputs-js.yml
generated
vendored
7
.github/workflows/__packaging-config-inputs-js.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -29,11 +30,11 @@ jobs:
|
|||||||
- os: macos-latest
|
- os: macos-latest
|
||||||
version: nightly-20210831
|
version: nightly-20210831
|
||||||
name: 'Packaging: Config and input'
|
name: 'Packaging: Config and input'
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
|
|||||||
7
.github/workflows/__packaging-config-js.yml
generated
vendored
7
.github/workflows/__packaging-config-js.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -29,11 +30,11 @@ jobs:
|
|||||||
- os: macos-latest
|
- os: macos-latest
|
||||||
version: nightly-20210831
|
version: nightly-20210831
|
||||||
name: 'Packaging: Config file'
|
name: 'Packaging: Config file'
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
|
|||||||
7
.github/workflows/__packaging-inputs-js.yml
generated
vendored
7
.github/workflows/__packaging-inputs-js.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -29,11 +30,11 @@ jobs:
|
|||||||
- os: macos-latest
|
- os: macos-latest
|
||||||
version: nightly-20210831
|
version: nightly-20210831
|
||||||
name: 'Packaging: Action input'
|
name: 'Packaging: Action input'
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
|
|||||||
7
.github/workflows/__remote-config.yml
generated
vendored
7
.github/workflows/__remote-config.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -65,11 +66,11 @@ jobs:
|
|||||||
- os: windows-2022
|
- os: windows-2022
|
||||||
version: nightly-latest
|
version: nightly-latest
|
||||||
name: Remote config file
|
name: Remote config file
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
|
|||||||
7
.github/workflows/__rubocop-multi-language.yml
generated
vendored
7
.github/workflows/__rubocop-multi-language.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -37,11 +38,11 @@ jobs:
|
|||||||
- os: ubuntu-latest
|
- os: ubuntu-latest
|
||||||
version: nightly-latest
|
version: nightly-latest
|
||||||
name: RuboCop multi-language
|
name: RuboCop multi-language
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
|
|||||||
7
.github/workflows/__split-workflow.yml
generated
vendored
7
.github/workflows/__split-workflow.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -29,11 +30,11 @@ jobs:
|
|||||||
- os: macos-latest
|
- os: macos-latest
|
||||||
version: nightly-20210831
|
version: nightly-20210831
|
||||||
name: Split workflow
|
name: Split workflow
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
|
|||||||
67
.github/workflows/__test-autobuild-working-dir.yml
generated
vendored
Normal file
67
.github/workflows/__test-autobuild-working-dir.yml
generated
vendored
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
# Warning: This file is generated automatically, and should not be modified.
|
||||||
|
# Instead, please modify the template in the pr-checks directory and run:
|
||||||
|
# pip install ruamel.yaml && python3 sync.py
|
||||||
|
# to regenerate this file.
|
||||||
|
|
||||||
|
name: PR Check - Autobuild working directory
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
GO111MODULE: auto
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
|
pull_request:
|
||||||
|
types:
|
||||||
|
- opened
|
||||||
|
- synchronize
|
||||||
|
- reopened
|
||||||
|
- ready_for_review
|
||||||
|
workflow_dispatch: {}
|
||||||
|
jobs:
|
||||||
|
test-autobuild-working-dir:
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- os: ubuntu-latest
|
||||||
|
version: latest
|
||||||
|
name: Autobuild working directory
|
||||||
|
timeout-minutes: 45
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
steps:
|
||||||
|
- name: Check out repository
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- name: Prepare test
|
||||||
|
id: prepare-test
|
||||||
|
uses: ./.github/prepare-test
|
||||||
|
with:
|
||||||
|
version: ${{ matrix.version }}
|
||||||
|
- name: Test setup
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
# Make sure that Gradle build succeeds in autobuild-dir ...
|
||||||
|
cp -a ../action/tests/java-repo autobuild-dir
|
||||||
|
# ... and fails if attempted in the current directory
|
||||||
|
echo > build.gradle
|
||||||
|
- uses: ./../action/init
|
||||||
|
with:
|
||||||
|
languages: java
|
||||||
|
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||||
|
- uses: ./../action/autobuild
|
||||||
|
with:
|
||||||
|
working-directory: autobuild-dir
|
||||||
|
- uses: ./../action/analyze
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
- name: Check database
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
cd "$RUNNER_TEMP/codeql_databases"
|
||||||
|
if [[ ! -d java ]]; then
|
||||||
|
echo "Did not find a Java database"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
env:
|
||||||
|
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||||
7
.github/workflows/__test-local-codeql.yml
generated
vendored
7
.github/workflows/__test-local-codeql.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -27,11 +28,11 @@ jobs:
|
|||||||
- os: ubuntu-latest
|
- os: ubuntu-latest
|
||||||
version: nightly-latest
|
version: nightly-latest
|
||||||
name: Local CodeQL bundle
|
name: Local CodeQL bundle
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
|
|||||||
7
.github/workflows/__test-proxy.yml
generated
vendored
7
.github/workflows/__test-proxy.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -27,11 +28,11 @@ jobs:
|
|||||||
- os: ubuntu-latest
|
- os: ubuntu-latest
|
||||||
version: latest
|
version: latest
|
||||||
name: Proxy test
|
name: Proxy test
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
|
|||||||
7
.github/workflows/__test-ruby.yml
generated
vendored
7
.github/workflows/__test-ruby.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -37,11 +38,11 @@ jobs:
|
|||||||
- os: macos-latest
|
- os: macos-latest
|
||||||
version: nightly-latest
|
version: nightly-latest
|
||||||
name: Ruby analysis
|
name: Ruby analysis
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
|
|||||||
7
.github/workflows/__unset-environment.yml
generated
vendored
7
.github/workflows/__unset-environment.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -37,11 +38,11 @@ jobs:
|
|||||||
- os: ubuntu-latest
|
- os: ubuntu-latest
|
||||||
version: nightly-latest
|
version: nightly-latest
|
||||||
name: Test unsetting environment variables
|
name: Test unsetting environment variables
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
|
|||||||
7
.github/workflows/__upload-ref-sha-input.yml
generated
vendored
7
.github/workflows/__upload-ref-sha-input.yml
generated
vendored
@@ -11,7 +11,8 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- opened
|
- opened
|
||||||
@@ -65,11 +66,11 @@ jobs:
|
|||||||
- os: windows-2022
|
- os: windows-2022
|
||||||
version: nightly-latest
|
version: nightly-latest
|
||||||
name: "Upload-sarif: 'ref' and 'sha' from inputs"
|
name: "Upload-sarif: 'ref' and 'sha' from inputs"
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Prepare test
|
- name: Prepare test
|
||||||
id: prepare-test
|
id: prepare-test
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
|
|||||||
146
.github/workflows/__with-checkout-path.yml
generated
vendored
Normal file
146
.github/workflows/__with-checkout-path.yml
generated
vendored
Normal file
@@ -0,0 +1,146 @@
|
|||||||
|
# Warning: This file is generated automatically, and should not be modified.
|
||||||
|
# Instead, please modify the template in the pr-checks directory and run:
|
||||||
|
# pip install ruamel.yaml && python3 sync.py
|
||||||
|
# to regenerate this file.
|
||||||
|
|
||||||
|
name: PR Check - Use a custom `checkout_path`
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
GO111MODULE: auto
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
|
pull_request:
|
||||||
|
types:
|
||||||
|
- opened
|
||||||
|
- synchronize
|
||||||
|
- reopened
|
||||||
|
- ready_for_review
|
||||||
|
workflow_dispatch: {}
|
||||||
|
jobs:
|
||||||
|
with-checkout-path:
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- os: ubuntu-latest
|
||||||
|
version: stable-20210308
|
||||||
|
- os: macos-latest
|
||||||
|
version: stable-20210308
|
||||||
|
- os: windows-2019
|
||||||
|
version: stable-20210308
|
||||||
|
- os: ubuntu-latest
|
||||||
|
version: stable-20210319
|
||||||
|
- os: macos-latest
|
||||||
|
version: stable-20210319
|
||||||
|
- os: windows-2019
|
||||||
|
version: stable-20210319
|
||||||
|
- os: ubuntu-latest
|
||||||
|
version: stable-20210809
|
||||||
|
- os: macos-latest
|
||||||
|
version: stable-20210809
|
||||||
|
- os: windows-2019
|
||||||
|
version: stable-20210809
|
||||||
|
- os: ubuntu-latest
|
||||||
|
version: cached
|
||||||
|
- os: macos-latest
|
||||||
|
version: cached
|
||||||
|
- os: windows-2019
|
||||||
|
version: cached
|
||||||
|
- os: ubuntu-latest
|
||||||
|
version: latest
|
||||||
|
- os: macos-latest
|
||||||
|
version: latest
|
||||||
|
- os: windows-2019
|
||||||
|
version: latest
|
||||||
|
- os: windows-2022
|
||||||
|
version: latest
|
||||||
|
- os: ubuntu-latest
|
||||||
|
version: nightly-latest
|
||||||
|
- os: macos-latest
|
||||||
|
version: nightly-latest
|
||||||
|
- os: windows-2019
|
||||||
|
version: nightly-latest
|
||||||
|
- os: windows-2022
|
||||||
|
version: nightly-latest
|
||||||
|
name: Use a custom `checkout_path`
|
||||||
|
timeout-minutes: 45
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
steps:
|
||||||
|
- name: Check out repository
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- name: Prepare test
|
||||||
|
id: prepare-test
|
||||||
|
uses: ./.github/prepare-test
|
||||||
|
with:
|
||||||
|
version: ${{ matrix.version }}
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||||
|
path: x/y/z/some-path
|
||||||
|
- uses: ./../action/init
|
||||||
|
with:
|
||||||
|
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||||
|
# it's enough to test one compiled language and one interpreted language
|
||||||
|
languages: csharp,javascript
|
||||||
|
source-path: x/y/z/some-path/tests/multi-language-repo
|
||||||
|
debug: true
|
||||||
|
- name: Build code (non-windows)
|
||||||
|
shell: bash
|
||||||
|
if: ${{ runner.os != 'Windows' }}
|
||||||
|
run: |
|
||||||
|
$CODEQL_RUNNER x/y/z/some-path/tests/multi-language-repo/build.sh
|
||||||
|
- name: Build code (windows)
|
||||||
|
shell: bash
|
||||||
|
if: ${{ runner.os == 'Windows' }}
|
||||||
|
run: |
|
||||||
|
x/y/z/some-path/tests/multi-language-repo/build.sh
|
||||||
|
- uses: ./../action/analyze
|
||||||
|
with:
|
||||||
|
checkout_path: x/y/z/some-path/tests/multi-language-repo
|
||||||
|
ref: v1.1.0
|
||||||
|
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||||
|
upload: false
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
- uses: ./../action/upload-sarif
|
||||||
|
with:
|
||||||
|
ref: v1.1.0
|
||||||
|
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||||
|
checkout_path: x/y/z/some-path/tests/multi-language-repo
|
||||||
|
env:
|
||||||
|
TEST_MODE: true
|
||||||
|
|
||||||
|
- name: Verify SARIF after upload
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
EXPECTED_COMMIT_OID="474bbf07f9247ffe1856c6a0f94aeeb10e7afee6"
|
||||||
|
EXPECTED_REF="v1.1.0"
|
||||||
|
EXPECTED_CHECKOUT_URI_SUFFIX="/x/y/z/some-path/tests/multi-language-repo"
|
||||||
|
|
||||||
|
ACTUAL_COMMIT_OID="$(cat "$RUNNER_TEMP/payload.json" | jq -r .commit_oid)"
|
||||||
|
ACTUAL_REF="$(cat "$RUNNER_TEMP/payload.json" | jq -r .ref)"
|
||||||
|
ACTUAL_CHECKOUT_URI="$(cat "$RUNNER_TEMP/payload.json" | jq -r .checkout_uri)"
|
||||||
|
|
||||||
|
if [[ "$EXPECTED_COMMIT_OID" != "$ACTUAL_COMMIT_OID" ]]; then
|
||||||
|
echo "::error Invalid commit oid. Expected: $EXPECTED_COMMIT_OID Actual: $ACTUAL_COMMIT_OID"
|
||||||
|
echo "$RUNNER_TEMP/payload.json"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$EXPECTED_REF" != "$ACTUAL_REF" ]]; then
|
||||||
|
echo "::error Invalid ref. Expected: '$EXPECTED_REF' Actual: '$ACTUAL_REF'"
|
||||||
|
echo "$RUNNER_TEMP/payload.json"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$ACTUAL_CHECKOUT_URI" != *$EXPECTED_CHECKOUT_URI_SUFFIX ]]; then
|
||||||
|
echo "::error Invalid checkout URI suffix. Expected suffix: $EXPECTED_CHECKOUT_URI_SUFFIX Actual uri: $ACTUAL_CHECKOUT_URI"
|
||||||
|
echo "$RUNNER_TEMP/payload.json"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
env:
|
||||||
|
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
|
||||||
@@ -15,7 +15,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout CodeQL Action
|
- name: Checkout CodeQL Action
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Check Expected Release Files
|
- name: Check Expected Release Files
|
||||||
run: |
|
run: |
|
||||||
bundle_version="$(cat "./src/defaults.json" | jq -r ".bundleVersion")"
|
bundle_version="$(cat "./src/defaults.json" | jq -r ".bundleVersion")"
|
||||||
|
|||||||
31
.github/workflows/check-for-conflicts.yml
vendored
Normal file
31
.github/workflows/check-for-conflicts.yml
vendored
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
# Checks for any conflict markers created by git. This check is primarily intended to validate that
|
||||||
|
# any merge conflicts in the v2 -> v1 backport PR are fixed before the PR is merged.
|
||||||
|
name: Check for conflicts
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches: [main, v1, v2]
|
||||||
|
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
|
||||||
|
# by other workflows.
|
||||||
|
types: [opened, synchronize, reopened, ready_for_review]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
check-for-conflicts:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Check for conflicts
|
||||||
|
run: |
|
||||||
|
# Use `|| true` since grep returns exit code 1 if there are no matches, and we don't want
|
||||||
|
# this to fail the workflow.
|
||||||
|
FILES_WITH_CONFLICTS=$(grep --extended-regexp --ignore-case --line-number --recursive \
|
||||||
|
'^(<<<<<<<|>>>>>>>)' . || true)
|
||||||
|
if [[ "${FILES_WITH_CONFLICTS}" ]]; then
|
||||||
|
echo "Fail: Found merge conflict markers in the following files:"
|
||||||
|
echo ""
|
||||||
|
echo "${FILES_WITH_CONFLICTS}"
|
||||||
|
exit 1
|
||||||
|
else
|
||||||
|
echo "Success: Found no merge conflict markers."
|
||||||
|
fi
|
||||||
8
.github/workflows/codeql.yml
vendored
8
.github/workflows/codeql.yml
vendored
@@ -2,9 +2,9 @@ name: "CodeQL action"
|
|||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [main, v1]
|
branches: [main, releases/v1, releases/v2]
|
||||||
pull_request:
|
pull_request:
|
||||||
branches: [main, v1]
|
branches: [main, releases/v1, releases/v2]
|
||||||
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
|
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
|
||||||
# by other workflows.
|
# by other workflows.
|
||||||
types: [opened, synchronize, reopened, ready_for_review]
|
types: [opened, synchronize, reopened, ready_for_review]
|
||||||
@@ -20,7 +20,7 @@ jobs:
|
|||||||
security-events: write
|
security-events: write
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
- name: Init with default CodeQL bundle from the VM image
|
- name: Init with default CodeQL bundle from the VM image
|
||||||
id: init-default
|
id: init-default
|
||||||
uses: ./init
|
uses: ./init
|
||||||
@@ -75,7 +75,7 @@ jobs:
|
|||||||
security-events: write
|
security-events: write
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
- uses: ./init
|
- uses: ./init
|
||||||
id: init
|
id: init
|
||||||
with:
|
with:
|
||||||
|
|||||||
92
.github/workflows/post-release-mergeback.yml
vendored
92
.github/workflows/post-release-mergeback.yml
vendored
@@ -1,7 +1,8 @@
|
|||||||
# This workflow runs after a release of the action.
|
# This workflow runs after a release of the action. For v2 releases, it merges any changes from the
|
||||||
# It merges any changes from the release back into the
|
# release back into the main branch. Typically, this is just a single commit that updates the
|
||||||
# main branch. Typically, this is just a single commit
|
# changelog. For v2 and v1 releases, it then (a) tags the merge commit on the release branch that
|
||||||
# that updates the changelog.
|
# represents the new release with an `vx.y.z` tag and (b) updates the `vx` tag to refer to this
|
||||||
|
# commit.
|
||||||
name: Tag release and merge back
|
name: Tag release and merge back
|
||||||
|
|
||||||
on:
|
on:
|
||||||
@@ -14,7 +15,8 @@ on:
|
|||||||
|
|
||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- v1
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
merge-back:
|
merge-back:
|
||||||
@@ -25,13 +27,16 @@ jobs:
|
|||||||
HEAD_BRANCH: "${{ github.head_ref || github.ref }}"
|
HEAD_BRANCH: "${{ github.head_ref || github.ref }}"
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Dump GitHub Event context
|
- name: Dump environment
|
||||||
env:
|
run: env
|
||||||
GITHUB_EVENT_CONTEXT: "${{ toJson(github.event) }}"
|
|
||||||
run: echo "$GITHUB_EVENT_CONTEXT"
|
|
||||||
|
|
||||||
- uses: actions/checkout@v2
|
- name: Dump GitHub context
|
||||||
- uses: actions/setup-node@v2
|
env:
|
||||||
|
GITHUB_CONTEXT: '${{ toJson(github) }}'
|
||||||
|
run: echo "${GITHUB_CONTEXT}"
|
||||||
|
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- uses: actions/setup-node@v3
|
||||||
|
|
||||||
- name: Update git config
|
- name: Update git config
|
||||||
run: |
|
run: |
|
||||||
@@ -42,25 +47,25 @@ jobs:
|
|||||||
id: getVersion
|
id: getVersion
|
||||||
run: |
|
run: |
|
||||||
VERSION="v$(jq '.version' -r 'package.json')"
|
VERSION="v$(jq '.version' -r 'package.json')"
|
||||||
SHORT_SHA="${GITHUB_SHA:0:8}"
|
echo "::set-output name=version::${VERSION}"
|
||||||
echo "::set-output name=version::$VERSION"
|
short_sha="${GITHUB_SHA:0:8}"
|
||||||
NEW_BRANCH="mergeback/${VERSION}-to-${BASE_BRANCH}-${SHORT_SHA}"
|
NEW_BRANCH="mergeback/${VERSION}-to-${BASE_BRANCH}-${short_sha}"
|
||||||
echo "::set-output name=newBranch::$NEW_BRANCH"
|
echo "::set-output name=newBranch::${NEW_BRANCH}"
|
||||||
|
|
||||||
|
|
||||||
- name: Dump branches
|
- name: Dump branches
|
||||||
env:
|
env:
|
||||||
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
|
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
|
||||||
run: |
|
run: |
|
||||||
echo "BASE_BRANCH $BASE_BRANCH"
|
echo "BASE_BRANCH ${BASE_BRANCH}"
|
||||||
echo "HEAD_BRANCH $HEAD_BRANCH"
|
echo "HEAD_BRANCH ${HEAD_BRANCH}"
|
||||||
echo "NEW_BRANCH $NEW_BRANCH"
|
echo "NEW_BRANCH ${NEW_BRANCH}"
|
||||||
|
|
||||||
- name: Create mergeback branch
|
- name: Create mergeback branch
|
||||||
env:
|
env:
|
||||||
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
|
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
|
||||||
run: |
|
run: |
|
||||||
git checkout -b "$NEW_BRANCH"
|
git checkout -b "${NEW_BRANCH}"
|
||||||
|
|
||||||
- name: Check for tag
|
- name: Check for tag
|
||||||
id: check
|
id: check
|
||||||
@@ -68,13 +73,13 @@ jobs:
|
|||||||
VERSION: "${{ steps.getVersion.outputs.version }}"
|
VERSION: "${{ steps.getVersion.outputs.version }}"
|
||||||
run: |
|
run: |
|
||||||
set +e # don't fail on an errored command
|
set +e # don't fail on an errored command
|
||||||
git ls-remote --tags origin | grep "$VERSION"
|
git ls-remote --tags origin | grep "${VERSION}"
|
||||||
EXISTS="$?"
|
exists="$?"
|
||||||
if [ "$EXISTS" -eq 0 ]; then
|
if [ "${exists}" -eq 0 ]; then
|
||||||
echo "Tag $TAG exists. Not going to re-release."
|
echo "Tag ${VERSION} exists. Not going to re-release."
|
||||||
echo "::set-output name=exists::true"
|
echo "::set-output name=exists::true"
|
||||||
else
|
else
|
||||||
echo "Tag $TAG does not exist yet."
|
echo "Tag ${VERSION} does not exist yet."
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# we didn't tag the release during the update-release-branch workflow because the
|
# we didn't tag the release during the update-release-branch workflow because the
|
||||||
@@ -85,35 +90,48 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
VERSION: ${{ steps.getVersion.outputs.version }}
|
VERSION: ${{ steps.getVersion.outputs.version }}
|
||||||
run: |
|
run: |
|
||||||
git tag -a "$VERSION" -m "$VERSION"
|
# Unshallow the repo in order to allow pushes
|
||||||
git fetch --unshallow # unshallow the repo in order to allow pushes
|
git fetch --unshallow
|
||||||
git push origin --follow-tags "$VERSION"
|
# Create the `vx.y.z` tag
|
||||||
|
git tag --annotate "${VERSION}" --message "${VERSION}"
|
||||||
|
# Update the `vx` tag
|
||||||
|
major_version_tag=$(cut -d '.' -f1 <<< "${VERSION}")
|
||||||
|
# Use `--force` to overwrite the major version tag
|
||||||
|
git tag --annotate "${major_version_tag}" --message "${major_version_tag}" --force
|
||||||
|
# Push the tags, using:
|
||||||
|
# - `--atomic` to make sure we either update both tags or neither (an intermediate state,
|
||||||
|
# e.g. where we update the v2.x.y tag on the remote but not the v2 tag, could result in
|
||||||
|
# unwanted Dependabot updates, e.g. from v2 to v2.x.y)
|
||||||
|
# - `--force` since we're overwriting the `vx` tag
|
||||||
|
git push origin --atomic --force refs/tags/"${VERSION}" refs/tags/"${major_version_tag}"
|
||||||
|
|
||||||
- name: Create mergeback branch
|
- name: Create mergeback branch
|
||||||
if: steps.check.outputs.exists != 'true'
|
if: steps.check.outputs.exists != 'true' && contains(github.ref, 'releases/v2')
|
||||||
env:
|
env:
|
||||||
VERSION: "${{ steps.getVersion.outputs.version }}"
|
VERSION: "${{ steps.getVersion.outputs.version }}"
|
||||||
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
|
NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
|
||||||
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
|
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
|
||||||
run: |
|
run: |
|
||||||
set -exu
|
set -exu
|
||||||
PR_TITLE="Mergeback $VERSION $HEAD_BRANCH into $BASE_BRANCH"
|
pr_title="Mergeback ${VERSION} ${HEAD_BRANCH} into ${BASE_BRANCH}"
|
||||||
PR_BODY="Updates version and changelog."
|
pr_body="Updates version and changelog."
|
||||||
|
|
||||||
|
# Update the version number ready for the next release
|
||||||
|
npm version patch --no-git-tag-version
|
||||||
|
|
||||||
# Update the changelog
|
# Update the changelog
|
||||||
perl -i -pe 's/^/## \[UNRELEASED\]\n\nNo user facing changes.\n\n/ if($.==3)' CHANGELOG.md
|
perl -i -pe 's/^/## \[UNRELEASED\]\n\nNo user facing changes.\n\n/ if($.==3)' CHANGELOG.md
|
||||||
git add .
|
git add .
|
||||||
git commit -m "Update changelog and version after $VERSION"
|
git commit -m "Update changelog and version after ${VERSION}"
|
||||||
npm version patch
|
|
||||||
|
|
||||||
git push origin "$NEW_BRANCH"
|
git push origin "${NEW_BRANCH}"
|
||||||
|
|
||||||
# PR checks won't be triggered on PRs created by Actions. Therefore mark the PR as draft
|
# PR checks won't be triggered on PRs created by Actions. Therefore mark the PR as draft
|
||||||
# so that a maintainer can take the PR out of draft, thereby triggering the PR checks.
|
# so that a maintainer can take the PR out of draft, thereby triggering the PR checks.
|
||||||
gh pr create \
|
gh pr create \
|
||||||
--head "$NEW_BRANCH" \
|
--head "${NEW_BRANCH}" \
|
||||||
--base "$BASE_BRANCH" \
|
--base "${BASE_BRANCH}" \
|
||||||
--title "$PR_TITLE" \
|
--title "${pr_title}" \
|
||||||
--label "Update dependencies" \
|
--label "Update dependencies" \
|
||||||
--body "$PR_BODY" \
|
--body "${pr_body}" \
|
||||||
--draft
|
--draft
|
||||||
|
|||||||
98
.github/workflows/pr-checks.yml
vendored
98
.github/workflows/pr-checks.yml
vendored
@@ -2,7 +2,7 @@ name: PR Checks (Basic Checks and Runner)
|
|||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [main, v1]
|
branches: [main, releases/v1, releases/v2]
|
||||||
pull_request:
|
pull_request:
|
||||||
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
|
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
|
||||||
# by other workflows.
|
# by other workflows.
|
||||||
@@ -13,41 +13,69 @@ jobs:
|
|||||||
lint-js:
|
lint-js:
|
||||||
name: Lint
|
name: Lint
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
- name: Run Lint
|
- name: Run Lint
|
||||||
run: npm run-script lint
|
run: npm run-script lint
|
||||||
|
|
||||||
check-js:
|
check-js:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
fail-fast: true
|
||||||
|
matrix:
|
||||||
|
node-types-version: [12.12, current]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Update version of @types/node
|
||||||
|
if: matrix.node-types-version != 'current'
|
||||||
|
env:
|
||||||
|
NODE_TYPES_VERSION: ${{ matrix.node-types-version }}
|
||||||
|
run: |
|
||||||
|
# Export `NODE_TYPES_VERSION` so it's available to jq
|
||||||
|
export NODE_TYPES_VERSION="${NODE_TYPES_VERSION}"
|
||||||
|
contents=$(jq '.devDependencies."@types/node" = env.NODE_TYPES_VERSION' package.json)
|
||||||
|
echo "${contents}" > package.json
|
||||||
|
# Usually we run `npm install` on macOS to ensure that we pick up macOS-only dependencies.
|
||||||
|
# However we're not checking in the updated lockfile here, so it's fine to run
|
||||||
|
# `npm install` on Linux.
|
||||||
|
npm install
|
||||||
|
|
||||||
|
if [ ! -z "$(git status --porcelain)" ]; then
|
||||||
|
git config --global user.email "github-actions@github.com"
|
||||||
|
git config --global user.name "github-actions[bot]"
|
||||||
|
# The period in `git add --all .` ensures that we stage deleted files too.
|
||||||
|
git add --all .
|
||||||
|
git commit -m "Use @types/node=${NODE_TYPES_VERSION}"
|
||||||
|
fi
|
||||||
|
|
||||||
- name: Check generated JS
|
- name: Check generated JS
|
||||||
run: .github/workflows/script/check-js.sh
|
run: .github/workflows/script/check-js.sh
|
||||||
|
|
||||||
check-node-modules:
|
check-node-modules:
|
||||||
name: Check modules up to date
|
name: Check modules up to date
|
||||||
runs-on: macos-latest
|
runs-on: macos-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
- name: Check node modules up to date
|
- name: Check node modules up to date
|
||||||
run: .github/workflows/script/check-node-modules.sh
|
run: .github/workflows/script/check-node-modules.sh
|
||||||
|
|
||||||
verify-pr-checks:
|
verify-pr-checks:
|
||||||
name: Verify PR checks up to date
|
name: Verify PR checks up to date
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v2
|
uses: actions/setup-python@v3
|
||||||
with:
|
with:
|
||||||
python-version: 3.8
|
python-version: 3.8
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
@@ -64,21 +92,21 @@ jobs:
|
|||||||
matrix:
|
matrix:
|
||||||
os: [ubuntu-latest, macos-latest]
|
os: [ubuntu-latest, macos-latest]
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
- name: npm run-script test
|
- name: npm run-script test
|
||||||
run: npm run-script test
|
run: npm run-script test
|
||||||
|
|
||||||
runner-analyze-javascript-ubuntu:
|
runner-analyze-javascript-ubuntu:
|
||||||
name: Runner ubuntu JS analyze
|
name: Runner ubuntu JS analyze
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Build runner
|
- name: Build runner
|
||||||
run: |
|
run: |
|
||||||
@@ -103,11 +131,11 @@ jobs:
|
|||||||
runner-analyze-javascript-windows:
|
runner-analyze-javascript-windows:
|
||||||
name: Runner windows JS analyze
|
name: Runner windows JS analyze
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Build runner
|
- name: Build runner
|
||||||
run: |
|
run: |
|
||||||
@@ -128,11 +156,11 @@ jobs:
|
|||||||
runner-analyze-javascript-macos:
|
runner-analyze-javascript-macos:
|
||||||
name: Runner macos JS analyze
|
name: Runner macos JS analyze
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: macos-latest
|
runs-on: macos-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Build runner
|
- name: Build runner
|
||||||
run: |
|
run: |
|
||||||
@@ -153,11 +181,11 @@ jobs:
|
|||||||
runner-analyze-csharp-ubuntu:
|
runner-analyze-csharp-ubuntu:
|
||||||
name: Runner ubuntu C# analyze
|
name: Runner ubuntu C# analyze
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Move codeql-action
|
- name: Move codeql-action
|
||||||
shell: bash
|
shell: bash
|
||||||
@@ -193,11 +221,11 @@ jobs:
|
|||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
|
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
|
||||||
# `windows-latest`.
|
# `windows-latest`.
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: windows-2019
|
runs-on: windows-2019
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Move codeql-action
|
- name: Move codeql-action
|
||||||
shell: bash
|
shell: bash
|
||||||
@@ -225,7 +253,7 @@ jobs:
|
|||||||
& $Env:CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
|
& $Env:CODEQL_RUNNER dotnet build /p:UseSharedCompilation=false
|
||||||
|
|
||||||
- name: Upload tracer logs
|
- name: Upload tracer logs
|
||||||
uses: actions/upload-artifact@v2
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: tracer-logs
|
name: tracer-logs
|
||||||
path: ./codeql-runner/compound-build-tracer.log
|
path: ./codeql-runner/compound-build-tracer.log
|
||||||
@@ -238,12 +266,12 @@ jobs:
|
|||||||
|
|
||||||
runner-analyze-csharp-macos:
|
runner-analyze-csharp-macos:
|
||||||
name: Runner macos C# analyze
|
name: Runner macos C# analyze
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
runs-on: macos-latest
|
runs-on: macos-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Move codeql-action
|
- name: Move codeql-action
|
||||||
shell: bash
|
shell: bash
|
||||||
@@ -277,12 +305,12 @@ jobs:
|
|||||||
|
|
||||||
runner-analyze-csharp-autobuild-ubuntu:
|
runner-analyze-csharp-autobuild-ubuntu:
|
||||||
name: Runner ubuntu autobuild C# analyze
|
name: Runner ubuntu autobuild C# analyze
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Move codeql-action
|
- name: Move codeql-action
|
||||||
shell: bash
|
shell: bash
|
||||||
@@ -313,7 +341,7 @@ jobs:
|
|||||||
TEST_MODE: true
|
TEST_MODE: true
|
||||||
|
|
||||||
runner-analyze-csharp-autobuild-windows:
|
runner-analyze-csharp-autobuild-windows:
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
name: Runner windows autobuild C# analyze
|
name: Runner windows autobuild C# analyze
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
|
# Build tracing currently does not support Windows 2022, so use `windows-2019` instead of
|
||||||
@@ -321,7 +349,7 @@ jobs:
|
|||||||
runs-on: windows-2019
|
runs-on: windows-2019
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Move codeql-action
|
- name: Move codeql-action
|
||||||
shell: bash
|
shell: bash
|
||||||
@@ -356,10 +384,10 @@ jobs:
|
|||||||
name: Runner macos autobuild C# analyze
|
name: Runner macos autobuild C# analyze
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
runs-on: macos-latest
|
runs-on: macos-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Move codeql-action
|
- name: Move codeql-action
|
||||||
shell: bash
|
shell: bash
|
||||||
@@ -394,12 +422,12 @@ jobs:
|
|||||||
name: Runner upload sarif
|
name: Runner upload sarif
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
|
|
||||||
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
|
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Build runner
|
- name: Build runner
|
||||||
run: |
|
run: |
|
||||||
@@ -417,10 +445,10 @@ jobs:
|
|||||||
name: Runner ubuntu extractor RAM and threads options
|
name: Runner ubuntu extractor RAM and threads options
|
||||||
needs: [check-js, check-node-modules]
|
needs: [check-js, check-node-modules]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Build runner
|
- name: Build runner
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
12
.github/workflows/python-deps.yml
vendored
12
.github/workflows/python-deps.yml
vendored
@@ -2,7 +2,7 @@ name: Test Python Package Installation on Linux and Mac
|
|||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [main, v1]
|
branches: [main, releases/v1, releases/v2]
|
||||||
pull_request:
|
pull_request:
|
||||||
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
|
# Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
|
||||||
# by other workflows.
|
# by other workflows.
|
||||||
@@ -10,7 +10,7 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test-setup-python-scripts:
|
test-setup-python-scripts:
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
@@ -25,7 +25,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: ./init
|
uses: ./init
|
||||||
@@ -71,7 +71,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: ./init
|
uses: ./init
|
||||||
@@ -122,9 +122,9 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- uses: actions/setup-python@v2
|
- uses: actions/setup-python@v3
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python_version }}
|
python-version: ${{ matrix.python_version }}
|
||||||
|
|
||||||
|
|||||||
55
.github/workflows/release-runner.yml
vendored
55
.github/workflows/release-runner.yml
vendored
@@ -1,55 +0,0 @@
|
|||||||
name: Release runner
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
bundle-tag:
|
|
||||||
description: 'Tag of the bundle release (e.g., "codeql-bundle-20200826")'
|
|
||||||
required: false
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
release-runner:
|
|
||||||
timeout-minutes: 30
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
env:
|
|
||||||
RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"
|
|
||||||
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
extension: ["linux", "macos", "win.exe"]
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Build runner
|
|
||||||
run: |
|
|
||||||
cd runner
|
|
||||||
npm install
|
|
||||||
npm run build-runner
|
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: codeql-runner-${{matrix.extension}}
|
|
||||||
path: runner/dist/codeql-runner-${{matrix.extension}}
|
|
||||||
|
|
||||||
- name: Resolve Upload URL for the release
|
|
||||||
if: ${{ github.event.inputs.bundle-tag != null }}
|
|
||||||
id: save_url
|
|
||||||
run: |
|
|
||||||
UPLOAD_URL=$(curl -sS \
|
|
||||||
"https://api.github.com/repos/${GITHUB_REPOSITORY}/releases/tags/${RELEASE_TAG}" \
|
|
||||||
-H "Accept: application/json" \
|
|
||||||
-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" | jq .upload_url | sed s/\"//g)
|
|
||||||
echo ${UPLOAD_URL}
|
|
||||||
echo "::set-output name=upload_url::${UPLOAD_URL}"
|
|
||||||
|
|
||||||
- name: Upload Platform Package
|
|
||||||
if: ${{ github.event.inputs.bundle-tag != null }}
|
|
||||||
uses: actions/upload-release-asset@v1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ steps.save_url.outputs.upload_url }}
|
|
||||||
asset_path: runner/dist/codeql-runner-${{matrix.extension}}
|
|
||||||
asset_name: codeql-runner-${{matrix.extension}}
|
|
||||||
asset_content_type: application/octet-stream
|
|
||||||
74
.github/workflows/split.yml
vendored
74
.github/workflows/split.yml
vendored
@@ -1,74 +0,0 @@
|
|||||||
#
|
|
||||||
# Split the CodeQL Bundle into platform bundles
|
|
||||||
#
|
|
||||||
# Instructions:
|
|
||||||
# 1. Upload the new codeql-bundle (codeql-bundle.tar.gz) as an asset of the
|
|
||||||
# release (codeql-bundle-20200826)
|
|
||||||
# 2. Take note of the CLI Release used by the bundle (e.g., v2.2.5)
|
|
||||||
# 3. Manually launch this workflow file (via the Actions UI) specifying
|
|
||||||
# - The CLI Release (e.g., v2.2.5)
|
|
||||||
# - The release tag (e.g., codeql-bundle-20200826)
|
|
||||||
# 4. If everything succeeds you should see 3 new assets.
|
|
||||||
#
|
|
||||||
|
|
||||||
name: Split Bundle
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
cli-release:
|
|
||||||
description: 'CodeQL CLI Release (e.g., "v2.2.5")'
|
|
||||||
required: true
|
|
||||||
bundle-tag:
|
|
||||||
description: 'Tag of the bundle release (e.g., "codeql-bundle-20200826")'
|
|
||||||
required: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
timeout-minutes: 30
|
|
||||||
env:
|
|
||||||
CLI_RELEASE: "${{ github.event.inputs.cli-release }}"
|
|
||||||
RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"
|
|
||||||
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
platform: ["linux64", "osx64", "win64"]
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Resolve Upload URL for the release
|
|
||||||
id: save_url
|
|
||||||
run: |
|
|
||||||
UPLOAD_URL=$(curl -sS \
|
|
||||||
"https://api.github.com/repos/${GITHUB_REPOSITORY}/releases/tags/${RELEASE_TAG}" \
|
|
||||||
-H "Accept: application/json" \
|
|
||||||
-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" | jq .upload_url | sed s/\"//g)
|
|
||||||
echo ${UPLOAD_URL}
|
|
||||||
echo "::set-output name=upload_url::${UPLOAD_URL}"
|
|
||||||
|
|
||||||
- name: Download CodeQL CLI and Bundle
|
|
||||||
run: |
|
|
||||||
wget --no-verbose "https://github.com/${GITHUB_REPOSITORY}/releases/download/${RELEASE_TAG}/codeql-bundle.tar.gz"
|
|
||||||
wget --no-verbose "https://github.com/github/codeql-cli-binaries/releases/download/${CLI_RELEASE}/codeql-${{matrix.platform}}.zip"
|
|
||||||
|
|
||||||
- name: Create Platform Package
|
|
||||||
# Replace the codeql-binaries with the platform specific ones
|
|
||||||
run: |
|
|
||||||
gunzip codeql-bundle.tar.gz
|
|
||||||
tar -f codeql-bundle.tar --delete codeql
|
|
||||||
unzip -q codeql-${{matrix.platform}}.zip
|
|
||||||
tar -f codeql-bundle.tar --append codeql
|
|
||||||
gzip codeql-bundle.tar
|
|
||||||
mv codeql-bundle.tar.gz codeql-bundle-${{matrix.platform}}.tar.gz
|
|
||||||
du -sh codeql-bundle-${{matrix.platform}}.tar.gz
|
|
||||||
|
|
||||||
- name: Upload Platform Package
|
|
||||||
uses: actions/upload-release-asset@v1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ steps.save_url.outputs.upload_url }}
|
|
||||||
asset_path: ./codeql-bundle-${{matrix.platform}}.tar.gz
|
|
||||||
asset_name: codeql-bundle-${{matrix.platform}}.tar.gz
|
|
||||||
asset_content_type: application/tar+gzip
|
|
||||||
4
.github/workflows/update-dependencies.yml
vendored
4
.github/workflows/update-dependencies.yml
vendored
@@ -6,12 +6,12 @@ on:
|
|||||||
jobs:
|
jobs:
|
||||||
update:
|
update:
|
||||||
name: Update dependencies
|
name: Update dependencies
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: macos-latest
|
runs-on: macos-latest
|
||||||
if: contains(github.event.pull_request.labels.*.name, 'Update dependencies') && (github.event.pull_request.head.repo.full_name == 'github/codeql-action')
|
if: contains(github.event.pull_request.labels.*.name, 'Update dependencies') && (github.event.pull_request.head.repo.full_name == 'github/codeql-action')
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Remove PR label
|
- name: Remove PR label
|
||||||
env:
|
env:
|
||||||
|
|||||||
47
.github/workflows/update-release-branch.yml
vendored
47
.github/workflows/update-release-branch.yml
vendored
@@ -1,25 +1,35 @@
|
|||||||
name: Update release branch
|
name: Update release branch
|
||||||
on:
|
on:
|
||||||
repository_dispatch:
|
# You can trigger this workflow via workflow dispatch to start a release.
|
||||||
# Example of how to trigger this:
|
# This will open a PR to update the v2 release branch.
|
||||||
# curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'
|
|
||||||
# Replace <token> with a personal access token from this page: https://github.com/settings/tokens
|
|
||||||
types: [update-release-branch]
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
|
# When the v2 release is complete, this workflow will open a PR to update the v1 release branch.
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- releases/v2
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
update:
|
update:
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
if: ${{ github.repository == 'github/codeql-action' }}
|
if: github.repository == 'github/codeql-action'
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- name: Dump environment
|
||||||
|
run: env
|
||||||
|
|
||||||
|
- name: Dump GitHub context
|
||||||
|
env:
|
||||||
|
GITHUB_CONTEXT: '${{ toJson(github) }}'
|
||||||
|
run: echo "$GITHUB_CONTEXT"
|
||||||
|
|
||||||
|
- uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
# Need full history so we calculate diffs
|
# Need full history so we calculate diffs
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v2
|
uses: actions/setup-python@v3
|
||||||
with:
|
with:
|
||||||
python-version: 3.8
|
python-version: 3.8
|
||||||
|
|
||||||
@@ -33,5 +43,20 @@ jobs:
|
|||||||
git config --global user.email "github-actions@github.com"
|
git config --global user.email "github-actions@github.com"
|
||||||
git config --global user.name "github-actions[bot]"
|
git config --global user.name "github-actions[bot]"
|
||||||
|
|
||||||
- name: Update release branch
|
- name: Update v2 release branch
|
||||||
run: python .github/update-release-branch.py ${{ secrets.GITHUB_TOKEN }} ${{ github.repository }}
|
if: github.event_name == 'workflow_dispatch'
|
||||||
|
run: |
|
||||||
|
python .github/update-release-branch.py \
|
||||||
|
--github-token ${{ secrets.GITHUB_TOKEN }} \
|
||||||
|
--repository-nwo ${{ github.repository }} \
|
||||||
|
--mode v2-release \
|
||||||
|
--conductor ${GITHUB_ACTOR}
|
||||||
|
|
||||||
|
- name: Update v1 release branch
|
||||||
|
if: github.event_name == 'push'
|
||||||
|
run: |
|
||||||
|
python .github/update-release-branch.py \
|
||||||
|
--github-token ${{ secrets.GITHUB_TOKEN }} \
|
||||||
|
--repository-nwo ${{ github.repository }} \
|
||||||
|
--mode v1-release \
|
||||||
|
--conductor ${GITHUB_ACTOR}
|
||||||
|
|||||||
@@ -7,19 +7,19 @@ on:
|
|||||||
jobs:
|
jobs:
|
||||||
update-supported-enterprise-server-versions:
|
update-supported-enterprise-server-versions:
|
||||||
name: Update Supported Enterprise Server Versions
|
name: Update Supported Enterprise Server Versions
|
||||||
timeout-minutes: 30
|
timeout-minutes: 45
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
if: ${{ github.repository == 'github/codeql-action' }}
|
if: ${{ github.repository == 'github/codeql-action' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Setup Python
|
- name: Setup Python
|
||||||
uses: actions/setup-python@v2
|
uses: actions/setup-python@v3
|
||||||
with:
|
with:
|
||||||
python-version: "3.7"
|
python-version: "3.7"
|
||||||
- name: Checkout CodeQL Action
|
- name: Checkout CodeQL Action
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
- name: Checkout Enterprise Releases
|
- name: Checkout Enterprise Releases
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
repository: github/enterprise-releases
|
repository: github/enterprise-releases
|
||||||
ssh-key: ${{ secrets.ENTERPRISE_RELEASES_SSH_KEY }}
|
ssh-key: ${{ secrets.ENTERPRISE_RELEASES_SSH_KEY }}
|
||||||
|
|||||||
23
CHANGELOG.md
23
CHANGELOG.md
@@ -1,8 +1,25 @@
|
|||||||
# CodeQL Action Changelog
|
# CodeQL Action Changelog
|
||||||
|
|
||||||
## [UNRELEASED]
|
## 1.1.9 - 27 Apr 2022
|
||||||
|
|
||||||
No user facing changes.
|
- Add `working-directory` input to the `autobuild` action. [#1024](https://github.com/github/codeql-action/pull/1024)
|
||||||
|
- The `analyze` and `upload-sarif` actions will now wait up to 2 minutes for processing to complete after they have uploaded the results so they can report any processing errors that occurred. This behavior can be disabled by setting the `wait-for-processing` action input to `"false"`. [#1007](https://github.com/github/codeql-action/pull/1007)
|
||||||
|
- Update default CodeQL bundle version to 2.9.0.
|
||||||
|
- Fix a bug where [status reporting fails on Windows](https://github.com/github/codeql-action/issues/1041). [#1042](https://github.com/github/codeql-action/pull/1042)
|
||||||
|
|
||||||
|
## 1.1.8 - 08 Apr 2022
|
||||||
|
|
||||||
|
- Update default CodeQL bundle version to 2.8.5. [#1014](https://github.com/github/codeql-action/pull/1014)
|
||||||
|
- Fix error where the init action would fail due to a GitHub API request that was taking too long to complete [#1025](https://github.com/github/codeql-action/pull/1025)
|
||||||
|
|
||||||
|
## 1.1.7 - 05 Apr 2022
|
||||||
|
|
||||||
|
- A bug where additional queries specified in the workflow file would sometimes not be respected has been fixed. [#1018](https://github.com/github/codeql-action/pull/1018)
|
||||||
|
|
||||||
|
## 1.1.6 - 30 Mar 2022
|
||||||
|
|
||||||
|
- Update default CodeQL bundle version to 2.8.4. [#990](https://github.com/github/codeql-action/pull/990)
|
||||||
|
- Fix a bug where an invalid `commit_oid` was being sent to code scanning when a custom checkout path was being used. [#956](https://github.com/github/codeql-action/pull/956)
|
||||||
|
|
||||||
## 1.1.5 - 15 Mar 2022
|
## 1.1.5 - 15 Mar 2022
|
||||||
|
|
||||||
@@ -19,7 +36,7 @@ No user facing changes.
|
|||||||
|
|
||||||
## 1.1.3 - 23 Feb 2022
|
## 1.1.3 - 23 Feb 2022
|
||||||
|
|
||||||
- Fix bug where the CLR traces can continue tracing even after tracing should be stopped. [#938](https://github.com/github/codeql-action/pull/938)
|
- Fix a bug where the CLR traces can continue tracing even after tracing should be stopped. [#938](https://github.com/github/codeql-action/pull/938)
|
||||||
|
|
||||||
## 1.1.2 - 17 Feb 2022
|
## 1.1.2 - 17 Feb 2022
|
||||||
|
|
||||||
|
|||||||
@@ -1 +1,3 @@
|
|||||||
**/* @github/codeql-action-reviewers
|
**/* @github/codeql-action-reviewers
|
||||||
|
|
||||||
|
/python-setup/ @github/codeql-python @github/codeql-action-reviewers
|
||||||
|
|||||||
@@ -61,16 +61,22 @@ Here are a few things you can do that will increase the likelihood of your pull
|
|||||||
## Releasing (write access required)
|
## Releasing (write access required)
|
||||||
|
|
||||||
1. The first step of releasing a new version of the `codeql-action` is running the "Update release branch" workflow.
|
1. The first step of releasing a new version of the `codeql-action` is running the "Update release branch" workflow.
|
||||||
This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `v1` release branch.
|
This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `releases/v2` release branch.
|
||||||
|
|
||||||
A release is automatically started every Monday via a scheduled run of this workflow, however you can start a release manually by triggering a run via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
|
You can start a release by triggering this workflow via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
|
||||||
1. The workflow run will open a pull request titled "Merge main into v1". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
|
1. The workflow run will open a pull request titled "Merge main into releases/v2". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
|
||||||
1. Review the checklist items in the pull request description.
|
1. Review the checklist items in the pull request description.
|
||||||
Once you've checked off all but the last of these, approve the PR and automerge it.
|
Once you've checked off all but the last two of these, approve the PR and automerge it.
|
||||||
1. When the "Merge main into v1" pull request is merged into the `v1` branch, the "Tag release and merge back" workflow will create a mergeback PR.
|
1. When the "Merge main into releases/v2" pull request is merged into the `releases/v2` branch, the "Tag release and merge back" workflow will create a mergeback PR.
|
||||||
This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into v1" pull request, and bumps the patch version of the CodeQL Action.
|
This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into releases/v2" pull request, and bumps the patch version of the CodeQL Action.
|
||||||
|
|
||||||
Approve the mergeback PR and automerge it. Once the mergeback has been merged into main, the release is complete.
|
Approve the mergeback PR and automerge it.
|
||||||
|
1. When the "Merge main into releases/v2" pull request is merged into the `releases/v2` branch, the "Update release branch" workflow will create a "Merge releases/v2 into releases/v1" pull request to merge the changes since the last release into the `releases/v1` release branch.
|
||||||
|
This ensures we keep both the `releases/v1` and `releases/v2` release branches up to date and fully supported.
|
||||||
|
|
||||||
|
Review the checklist items in the pull request description.
|
||||||
|
Once you've checked off all the items, approve the PR and automerge it.
|
||||||
|
1. Once the mergeback has been merged to `main` and the "Merge releases/v2 into releases/v1" PR has been merged to `releases/v1`, the release is complete.
|
||||||
|
|
||||||
## Keeping the PR checks up to date (admin access required)
|
## Keeping the PR checks up to date (admin access required)
|
||||||
|
|
||||||
@@ -85,12 +91,12 @@ To regenerate the PR jobs for the action:
|
|||||||
CHECKS="$(gh api repos/github/codeql-action/commits/${SHA}/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or . == "Update dependencies" or . == "Update Supported Enterprise Server Versions" | not)]')"
|
CHECKS="$(gh api repos/github/codeql-action/commits/${SHA}/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or . == "Update dependencies" or . == "Update Supported Enterprise Server Versions" | not)]')"
|
||||||
echo "{\"contexts\": ${CHECKS}}" > checks.json
|
echo "{\"contexts\": ${CHECKS}}" > checks.json
|
||||||
gh api -X "PATCH" repos/github/codeql-action/branches/main/protection/required_status_checks --input checks.json
|
gh api -X "PATCH" repos/github/codeql-action/branches/main/protection/required_status_checks --input checks.json
|
||||||
gh api -X "PATCH" repos/github/codeql-action/branches/v1/protection/required_status_checks --input checks.json
|
gh api -X "PATCH" repos/github/codeql-action/branches/releases/v2/protection/required_status_checks --input checks.json
|
||||||
|
gh api -X "PATCH" repos/github/codeql-action/branches/releases/v1/protection/required_status_checks --input checks.json
|
||||||
````
|
````
|
||||||
|
|
||||||
2. Go to the [branch protection rules settings page](https://github.com/github/codeql-action/settings/branches) and validate that the rules have been updated.
|
2. Go to the [branch protection rules settings page](https://github.com/github/codeql-action/settings/branches) and validate that the rules have been updated.
|
||||||
|
|
||||||
|
|
||||||
## Resources
|
## Resources
|
||||||
|
|
||||||
- [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
|
- [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
|
||||||
|
|||||||
22
README.md
22
README.md
@@ -52,11 +52,11 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
# Initializes the CodeQL tools for scanning.
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: github/codeql-action/init@v1
|
uses: github/codeql-action/init@v2
|
||||||
# Override language selection by uncommenting this and choosing your languages
|
# Override language selection by uncommenting this and choosing your languages
|
||||||
# with:
|
# with:
|
||||||
# languages: go, javascript, csharp, python, cpp, java
|
# languages: go, javascript, csharp, python, cpp, java
|
||||||
@@ -64,10 +64,10 @@ jobs:
|
|||||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||||
# If this step fails, then you should remove it and run the build manually (see below).
|
# If this step fails, then you should remove it and run the build manually (see below).
|
||||||
- name: Autobuild
|
- name: Autobuild
|
||||||
uses: github/codeql-action/autobuild@v1
|
uses: github/codeql-action/autobuild@v2
|
||||||
|
|
||||||
# ℹ️ Command-line programs to run using the OS shell.
|
# ℹ️ Command-line programs to run using the OS shell.
|
||||||
# 📚 https://git.io/JvXDl
|
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||||
|
|
||||||
# ✏️ If the Autobuild fails above, remove it and uncomment the following
|
# ✏️ If the Autobuild fails above, remove it and uncomment the following
|
||||||
# three lines and modify them (or add more) to build your code if your
|
# three lines and modify them (or add more) to build your code if your
|
||||||
@@ -78,14 +78,14 @@ jobs:
|
|||||||
# make release
|
# make release
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
- name: Perform CodeQL Analysis
|
||||||
uses: github/codeql-action/analyze@v1
|
uses: github/codeql-action/analyze@v2
|
||||||
```
|
```
|
||||||
|
|
||||||
If you prefer to integrate this within an existing CI workflow, it should end up looking something like this:
|
If you prefer to integrate this within an existing CI workflow, it should end up looking something like this:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: github/codeql-action/init@v1
|
uses: github/codeql-action/init@v2
|
||||||
with:
|
with:
|
||||||
languages: go, javascript
|
languages: go, javascript
|
||||||
|
|
||||||
@@ -95,7 +95,7 @@ If you prefer to integrate this within an existing CI workflow, it should end up
|
|||||||
make release
|
make release
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
- name: Perform CodeQL Analysis
|
||||||
uses: github/codeql-action/analyze@v1
|
uses: github/codeql-action/analyze@v2
|
||||||
```
|
```
|
||||||
|
|
||||||
### Configuration file
|
### Configuration file
|
||||||
@@ -103,7 +103,7 @@ If you prefer to integrate this within an existing CI workflow, it should end up
|
|||||||
Use the `config-file` parameter of the `init` action to enable the configuration file. The value of `config-file` is the path to the configuration file you want to use. This example loads the configuration file `./.github/codeql/codeql-config.yml`.
|
Use the `config-file` parameter of the `init` action to enable the configuration file. The value of `config-file` is the path to the configuration file you want to use. This example loads the configuration file `./.github/codeql/codeql-config.yml`.
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
- uses: github/codeql-action/init@v1
|
- uses: github/codeql-action/init@v2
|
||||||
with:
|
with:
|
||||||
config-file: ./.github/codeql/codeql-config.yml
|
config-file: ./.github/codeql/codeql-config.yml
|
||||||
```
|
```
|
||||||
@@ -111,7 +111,7 @@ Use the `config-file` parameter of the `init` action to enable the configuration
|
|||||||
The configuration file can be located in a different repository. This is useful if you want to share the same configuration across multiple repositories. If the configuration file is in a private repository you can also specify an `external-repository-token` option. This should be a personal access token that has read access to any repositories containing referenced config files and queries.
|
The configuration file can be located in a different repository. This is useful if you want to share the same configuration across multiple repositories. If the configuration file is in a private repository you can also specify an `external-repository-token` option. This should be a personal access token that has read access to any repositories containing referenced config files and queries.
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
- uses: github/codeql-action/init@v1
|
- uses: github/codeql-action/init@v2
|
||||||
with:
|
with:
|
||||||
config-file: owner/repo/codeql-config.yml@branch
|
config-file: owner/repo/codeql-config.yml@branch
|
||||||
external-repository-token: ${{ secrets.EXTERNAL_REPOSITORY_TOKEN }}
|
external-repository-token: ${{ secrets.EXTERNAL_REPOSITORY_TOKEN }}
|
||||||
@@ -122,7 +122,7 @@ For information on how to write a configuration file, see "[Using a custom confi
|
|||||||
If you only want to customise the queries used, you can specify them in your workflow instead of creating a config file, using the `queries` property of the `init` action:
|
If you only want to customise the queries used, you can specify them in your workflow instead of creating a config file, using the `queries` property of the `init` action:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
- uses: github/codeql-action/init@v1
|
- uses: github/codeql-action/init@v2
|
||||||
with:
|
with:
|
||||||
queries: <local-or-remote-query>,<another-query>
|
queries: <local-or-remote-query>,<another-query>
|
||||||
```
|
```
|
||||||
@@ -130,7 +130,7 @@ If you only want to customise the queries used, you can specify them in your wor
|
|||||||
By default, this will override any queries specified in a config file. If you wish to use both sets of queries, prefix the list of queries in the workflow with `+`:
|
By default, this will override any queries specified in a config file. If you wish to use both sets of queries, prefix the list of queries in the workflow with `+`:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
- uses: github/codeql-action/init@v1
|
- uses: github/codeql-action/init@v2
|
||||||
with:
|
with:
|
||||||
queries: +<local-or-remote-query>,<another-query>
|
queries: +<local-or-remote-query>,<another-query>
|
||||||
```
|
```
|
||||||
|
|||||||
@@ -61,7 +61,7 @@ inputs:
|
|||||||
wait-for-processing:
|
wait-for-processing:
|
||||||
description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
|
description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
|
||||||
required: true
|
required: true
|
||||||
default: "false"
|
default: "true"
|
||||||
token:
|
token:
|
||||||
default: ${{ github.token }}
|
default: ${{ github.token }}
|
||||||
matrix:
|
matrix:
|
||||||
|
|||||||
@@ -6,6 +6,12 @@ inputs:
|
|||||||
default: ${{ github.token }}
|
default: ${{ github.token }}
|
||||||
matrix:
|
matrix:
|
||||||
default: ${{ toJson(matrix) }}
|
default: ${{ toJson(matrix) }}
|
||||||
|
working-directory:
|
||||||
|
description: >-
|
||||||
|
Run the autobuilder using this path (relative to $GITHUB_WORKSPACE) as
|
||||||
|
working directory. If this input is not set, the autobuilder runs with
|
||||||
|
$GITHUB_WORKSPACE as its working directory.
|
||||||
|
required: false
|
||||||
runs:
|
runs:
|
||||||
using: 'node12'
|
using: 'node12'
|
||||||
main: '../lib/autobuild-action.js'
|
main: '../lib/autobuild-action.js'
|
||||||
|
|||||||
19
lib/actions-util.js
generated
19
lib/actions-util.js
generated
@@ -76,7 +76,7 @@ exports.getToolCacheDirectory = getToolCacheDirectory;
|
|||||||
/**
|
/**
|
||||||
* Gets the SHA of the commit that is currently checked out.
|
* Gets the SHA of the commit that is currently checked out.
|
||||||
*/
|
*/
|
||||||
const getCommitOid = async function (ref = "HEAD") {
|
const getCommitOid = async function (checkoutPath, ref = "HEAD") {
|
||||||
// Try to use git to get the current commit SHA. If that fails then
|
// Try to use git to get the current commit SHA. If that fails then
|
||||||
// log but otherwise silently fall back to using the SHA from the environment.
|
// log but otherwise silently fall back to using the SHA from the environment.
|
||||||
// The only time these two values will differ is during analysis of a PR when
|
// The only time these two values will differ is during analysis of a PR when
|
||||||
@@ -96,6 +96,7 @@ const getCommitOid = async function (ref = "HEAD") {
|
|||||||
process.stderr.write(data);
|
process.stderr.write(data);
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
cwd: checkoutPath,
|
||||||
}).exec();
|
}).exec();
|
||||||
return commitOid.trim();
|
return commitOid.trim();
|
||||||
}
|
}
|
||||||
@@ -115,6 +116,7 @@ const determineMergeBaseCommitOid = async function () {
|
|||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||||
|
const checkoutPath = (0, exports.getOptionalInput)("checkout_path");
|
||||||
try {
|
try {
|
||||||
let commitOid = "";
|
let commitOid = "";
|
||||||
let baseOid = "";
|
let baseOid = "";
|
||||||
@@ -139,6 +141,7 @@ const determineMergeBaseCommitOid = async function () {
|
|||||||
process.stderr.write(data);
|
process.stderr.write(data);
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
cwd: checkoutPath,
|
||||||
}).exec();
|
}).exec();
|
||||||
// Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
|
// Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
|
||||||
if (commitOid === mergeSha &&
|
if (commitOid === mergeSha &&
|
||||||
@@ -354,7 +357,7 @@ async function getWorkflowPath() {
|
|||||||
const repo = repo_nwo[1];
|
const repo = repo_nwo[1];
|
||||||
const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
|
const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
|
||||||
const apiClient = api.getActionsApiClient();
|
const apiClient = api.getActionsApiClient();
|
||||||
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id", {
|
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
|
||||||
owner,
|
owner,
|
||||||
repo,
|
repo,
|
||||||
run_id,
|
run_id,
|
||||||
@@ -427,6 +430,9 @@ async function getRef() {
|
|||||||
// or in the form "refs/pull/N/merge" on a pull_request event
|
// or in the form "refs/pull/N/merge" on a pull_request event
|
||||||
const refInput = (0, exports.getOptionalInput)("ref");
|
const refInput = (0, exports.getOptionalInput)("ref");
|
||||||
const shaInput = (0, exports.getOptionalInput)("sha");
|
const shaInput = (0, exports.getOptionalInput)("sha");
|
||||||
|
const checkoutPath = (0, exports.getOptionalInput)("checkout_path") ||
|
||||||
|
(0, exports.getOptionalInput)("source-root") ||
|
||||||
|
(0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE");
|
||||||
const hasRefInput = !!refInput;
|
const hasRefInput = !!refInput;
|
||||||
const hasShaInput = !!shaInput;
|
const hasShaInput = !!shaInput;
|
||||||
// If one of 'ref' or 'sha' are provided, both are required
|
// If one of 'ref' or 'sha' are provided, both are required
|
||||||
@@ -448,15 +454,14 @@ async function getRef() {
|
|||||||
if (!pull_ref_regex.test(ref)) {
|
if (!pull_ref_regex.test(ref)) {
|
||||||
return ref;
|
return ref;
|
||||||
}
|
}
|
||||||
const head = await (0, exports.getCommitOid)("HEAD");
|
const head = await (0, exports.getCommitOid)(checkoutPath, "HEAD");
|
||||||
// in actions/checkout@v2 we can check if git rev-parse HEAD == GITHUB_SHA
|
// in actions/checkout@v2+ we can check if git rev-parse HEAD == GITHUB_SHA
|
||||||
// in actions/checkout@v1 this may not be true as it checks out the repository
|
// in actions/checkout@v1 this may not be true as it checks out the repository
|
||||||
// using GITHUB_REF. There is a subtle race condition where
|
// using GITHUB_REF. There is a subtle race condition where
|
||||||
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
|
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
|
||||||
// git git-parse GITHUB_REF == git rev-parse HEAD instead.
|
// git git-parse GITHUB_REF == git rev-parse HEAD instead.
|
||||||
const hasChangedRef = sha !== head &&
|
const hasChangedRef = sha !== head &&
|
||||||
(await (0, exports.getCommitOid)(ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !==
|
(await (0, exports.getCommitOid)(checkoutPath, ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !== head;
|
||||||
head;
|
|
||||||
if (hasChangedRef) {
|
if (hasChangedRef) {
|
||||||
const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
|
const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
|
||||||
core.debug(`No longer on merge commit, rewriting ref from ${ref} to ${newRef}.`);
|
core.debug(`No longer on merge commit, rewriting ref from ${ref} to ${newRef}.`);
|
||||||
@@ -550,7 +555,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
|
|||||||
statusReport.runner_os_release = os.release();
|
statusReport.runner_os_release = os.release();
|
||||||
}
|
}
|
||||||
if (codeQlCliVersion !== undefined) {
|
if (codeQlCliVersion !== undefined) {
|
||||||
statusReport.codeql_cli_version = codeQlCliVersion;
|
statusReport.codeql_version = codeQlCliVersion;
|
||||||
}
|
}
|
||||||
return statusReport;
|
return statusReport;
|
||||||
}
|
}
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
132
lib/actions-util.test.js
generated
132
lib/actions-util.test.js
generated
@@ -39,74 +39,93 @@ function errorCodes(actual, expected) {
|
|||||||
await t.throwsAsync(actionsutil.getRef);
|
await t.throwsAsync(actionsutil.getRef);
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
|
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
|
||||||
const expectedRef = "refs/pull/1/merge";
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
const currentSha = "a".repeat(40);
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
process.env["GITHUB_REF"] = expectedRef;
|
const expectedRef = "refs/pull/1/merge";
|
||||||
process.env["GITHUB_SHA"] = currentSha;
|
const currentSha = "a".repeat(40);
|
||||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
process.env["GITHUB_REF"] = expectedRef;
|
||||||
callback.withArgs("HEAD").resolves(currentSha);
|
process.env["GITHUB_SHA"] = currentSha;
|
||||||
const actualRef = await actionsutil.getRef();
|
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||||
t.deepEqual(actualRef, expectedRef);
|
callback.withArgs("HEAD").resolves(currentSha);
|
||||||
callback.restore();
|
const actualRef = await actionsutil.getRef();
|
||||||
|
t.deepEqual(actualRef, expectedRef);
|
||||||
|
callback.restore();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
|
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
|
||||||
const expectedRef = "refs/pull/1/merge";
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
process.env["GITHUB_REF"] = expectedRef;
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
process.env["GITHUB_SHA"] = "b".repeat(40);
|
const expectedRef = "refs/pull/1/merge";
|
||||||
const sha = "a".repeat(40);
|
process.env["GITHUB_REF"] = expectedRef;
|
||||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
process.env["GITHUB_SHA"] = "b".repeat(40);
|
||||||
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
|
const sha = "a".repeat(40);
|
||||||
callback.withArgs("HEAD").resolves(sha);
|
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||||
const actualRef = await actionsutil.getRef();
|
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
|
||||||
t.deepEqual(actualRef, expectedRef);
|
callback.withArgs("HEAD").resolves(sha);
|
||||||
callback.restore();
|
const actualRef = await actionsutil.getRef();
|
||||||
|
t.deepEqual(actualRef, expectedRef);
|
||||||
|
callback.restore();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
|
(0, ava_1.default)("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
|
||||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||||
callback.withArgs("refs/pull/1/merge").resolves("a".repeat(40));
|
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||||
callback.withArgs("HEAD").resolves("b".repeat(40));
|
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||||
const actualRef = await actionsutil.getRef();
|
callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
|
||||||
t.deepEqual(actualRef, "refs/pull/1/head");
|
callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
|
||||||
callback.restore();
|
const actualRef = await actionsutil.getRef();
|
||||||
|
t.deepEqual(actualRef, "refs/pull/1/head");
|
||||||
|
callback.restore();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
|
(0, ava_1.default)("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
|
||||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
|
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||||
// These values are be ignored
|
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
|
||||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
|
||||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
// These values are be ignored
|
||||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||||
callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
|
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||||
callback.withArgs("HEAD").resolves("b".repeat(40));
|
const callback = sinon.stub(actionsutil, "getCommitOid");
|
||||||
const actualRef = await actionsutil.getRef();
|
callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
|
||||||
t.deepEqual(actualRef, "refs/pull/2/merge");
|
callback.withArgs("HEAD").resolves("b".repeat(40));
|
||||||
callback.restore();
|
const actualRef = await actionsutil.getRef();
|
||||||
getAdditionalInputStub.restore();
|
t.deepEqual(actualRef, "refs/pull/2/merge");
|
||||||
|
callback.restore();
|
||||||
|
getAdditionalInputStub.restore();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("getRef() throws an error if only `ref` is provided as an input", async (t) => {
|
(0, ava_1.default)("getRef() throws an error if only `ref` is provided as an input", async (t) => {
|
||||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
await t.throwsAsync(async () => {
|
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||||
await actionsutil.getRef();
|
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
|
||||||
}, {
|
await t.throwsAsync(async () => {
|
||||||
instanceOf: Error,
|
await actionsutil.getRef();
|
||||||
message: "Both 'ref' and 'sha' are required if one of them is provided.",
|
}, {
|
||||||
|
instanceOf: Error,
|
||||||
|
message: "Both 'ref' and 'sha' are required if one of them is provided.",
|
||||||
|
});
|
||||||
|
getAdditionalInputStub.restore();
|
||||||
});
|
});
|
||||||
getAdditionalInputStub.restore();
|
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("getRef() throws an error if only `sha` is provided as an input", async (t) => {
|
(0, ava_1.default)("getRef() throws an error if only `sha` is provided as an input", async (t) => {
|
||||||
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
await t.throwsAsync(async () => {
|
process.env["GITHUB_WORKSPACE"] = "/tmp";
|
||||||
await actionsutil.getRef();
|
const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
|
||||||
}, {
|
getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
|
||||||
instanceOf: Error,
|
await t.throwsAsync(async () => {
|
||||||
message: "Both 'ref' and 'sha' are required if one of them is provided.",
|
await actionsutil.getRef();
|
||||||
|
}, {
|
||||||
|
instanceOf: Error,
|
||||||
|
message: "Both 'ref' and 'sha' are required if one of them is provided.",
|
||||||
|
});
|
||||||
|
getAdditionalInputStub.restore();
|
||||||
});
|
});
|
||||||
getAdditionalInputStub.restore();
|
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("computeAutomationID()", async (t) => {
|
(0, ava_1.default)("computeAutomationID()", async (t) => {
|
||||||
let actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
|
let actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
|
||||||
@@ -461,6 +480,7 @@ on: ["push"]
|
|||||||
});
|
});
|
||||||
(0, ava_1.default)("isAnalyzingDefaultBranch()", async (t) => {
|
(0, ava_1.default)("isAnalyzingDefaultBranch()", async (t) => {
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
const envFile = path.join(tmpDir, "event.json");
|
const envFile = path.join(tmpDir, "event.json");
|
||||||
fs.writeFileSync(envFile, JSON.stringify({
|
fs.writeFileSync(envFile, JSON.stringify({
|
||||||
repository: {
|
repository: {
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
3
lib/analysis-paths.test.js
generated
3
lib/analysis-paths.test.js
generated
@@ -45,6 +45,7 @@ const util = __importStar(require("./util"));
|
|||||||
debugMode: false,
|
debugMode: false,
|
||||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||||
|
injectedMlQueries: false,
|
||||||
};
|
};
|
||||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||||
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
||||||
@@ -69,6 +70,7 @@ const util = __importStar(require("./util"));
|
|||||||
debugMode: false,
|
debugMode: false,
|
||||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||||
|
injectedMlQueries: false,
|
||||||
};
|
};
|
||||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||||
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
|
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
|
||||||
@@ -94,6 +96,7 @@ const util = __importStar(require("./util"));
|
|||||||
debugMode: false,
|
debugMode: false,
|
||||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||||
|
injectedMlQueries: false,
|
||||||
};
|
};
|
||||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||||
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IAC
J,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,KAAK;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,KAAK;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAA
G,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,KAAK;SACzB,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||||
2
lib/analyze.js
generated
2
lib/analyze.js
generated
@@ -131,11 +131,11 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
|
|||||||
if (!hasBuiltinQueries && !hasCustomQueries && !hasPackWithCustomQueries) {
|
if (!hasBuiltinQueries && !hasCustomQueries && !hasPackWithCustomQueries) {
|
||||||
throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
|
throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
|
||||||
}
|
}
|
||||||
|
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||||
try {
|
try {
|
||||||
if (hasPackWithCustomQueries) {
|
if (hasPackWithCustomQueries) {
|
||||||
logger.info("Performing analysis with custom CodeQL Packs.");
|
logger.info("Performing analysis with custom CodeQL Packs.");
|
||||||
logger.startGroup(`Downloading custom packs for ${language}`);
|
logger.startGroup(`Downloading custom packs for ${language}`);
|
||||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
|
||||||
const results = await codeql.packDownload(packsWithVersion);
|
const results = await codeql.packDownload(packsWithVersion);
|
||||||
logger.info(`Downloaded packs: ${results.packs
|
logger.info(`Downloaded packs: ${results.packs
|
||||||
.map((r) => `${r.name}@${r.version || "latest"}`)
|
.map((r) => `${r.name}@${r.version || "latest"}`)
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
1
lib/analyze.test.js
generated
1
lib/analyze.test.js
generated
@@ -128,6 +128,7 @@ const util = __importStar(require("./util"));
|
|||||||
debugMode: false,
|
debugMode: false,
|
||||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||||
|
injectedMlQueries: false,
|
||||||
};
|
};
|
||||||
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
|
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
|
||||||
recursive: true,
|
recursive: true,
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
5
lib/autobuild-action.js
generated
5
lib/autobuild-action.js
generated
@@ -52,6 +52,11 @@ async function run() {
|
|||||||
}
|
}
|
||||||
language = (0, autobuild_1.determineAutobuildLanguage)(config, logger);
|
language = (0, autobuild_1.determineAutobuildLanguage)(config, logger);
|
||||||
if (language !== undefined) {
|
if (language !== undefined) {
|
||||||
|
const workingDirectory = (0, actions_util_1.getOptionalInput)("working-directory");
|
||||||
|
if (workingDirectory) {
|
||||||
|
logger.info(`Changing autobuilder working directory to ${workingDirectory}`);
|
||||||
|
process.chdir(workingDirectory);
|
||||||
|
}
|
||||||
await (0, autobuild_1.runAutobuild)(language, config, logger);
|
await (0, autobuild_1.runAutobuild)(language, config, logger);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAMwB;AACxB,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,IAAA,sCAA0B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,
KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAOwB;AACxB,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,IAAA,sCAA0B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE;gBACpB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;aACjC;YACD,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO
;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||||
17
lib/codeql.js
generated
17
lib/codeql.js
generated
@@ -27,7 +27,6 @@ const fs = __importStar(require("fs"));
|
|||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||||
const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
|
const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
|
||||||
const yaml = __importStar(require("js-yaml"));
|
|
||||||
const query_string_1 = __importDefault(require("query-string"));
|
const query_string_1 = __importDefault(require("query-string"));
|
||||||
const semver = __importStar(require("semver"));
|
const semver = __importStar(require("semver"));
|
||||||
const actions_util_1 = require("./actions-util");
|
const actions_util_1 = require("./actions-util");
|
||||||
@@ -76,7 +75,6 @@ const CODEQL_VERSION_GROUP_RULES = "2.5.5";
|
|||||||
const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
|
const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
|
||||||
exports.CODEQL_VERSION_COUNTS_LINES = "2.6.2";
|
exports.CODEQL_VERSION_COUNTS_LINES = "2.6.2";
|
||||||
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
|
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
|
||||||
const CODEQL_VERSION_CONFIG_FILES = "2.8.2"; // Versions before 2.8.2 weren't tolerant to unknown properties
|
|
||||||
exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
|
exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
|
||||||
/**
|
/**
|
||||||
* This variable controls using the new style of tracing from the CodeQL
|
* This variable controls using the new style of tracing from the CodeQL
|
||||||
@@ -396,7 +394,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
|||||||
async getVersion() {
|
async getVersion() {
|
||||||
let result = util.getCachedCodeQlVersion();
|
let result = util.getCachedCodeQlVersion();
|
||||||
if (result === undefined) {
|
if (result === undefined) {
|
||||||
result = await runTool(cmd, ["version", "--format=terse"]);
|
result = (await runTool(cmd, ["version", "--format=terse"])).trim();
|
||||||
util.cacheCodeQlVersion(result);
|
util.cacheCodeQlVersion(result);
|
||||||
}
|
}
|
||||||
return result;
|
return result;
|
||||||
@@ -468,11 +466,6 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
|||||||
extraArgs.push(`--trace-process-level=${processLevel || 3}`);
|
extraArgs.push(`--trace-process-level=${processLevel || 3}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (await util.codeQlVersionAbove(codeql, CODEQL_VERSION_CONFIG_FILES)) {
|
|
||||||
const configLocation = path.resolve(config.tempDir, "user-config.yaml");
|
|
||||||
fs.writeFileSync(configLocation, yaml.dump(config.originalUserInput));
|
|
||||||
extraArgs.push(`--codescanning-config=${configLocation}`);
|
|
||||||
}
|
|
||||||
await runTool(cmd, [
|
await runTool(cmd, [
|
||||||
"database",
|
"database",
|
||||||
"init",
|
"init",
|
||||||
@@ -593,9 +586,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
|||||||
if (extraSearchPath !== undefined) {
|
if (extraSearchPath !== undefined) {
|
||||||
codeqlArgs.push("--additional-packs", extraSearchPath);
|
codeqlArgs.push("--additional-packs", extraSearchPath);
|
||||||
}
|
}
|
||||||
if (!(await util.codeQlVersionAbove(this, CODEQL_VERSION_CONFIG_FILES))) {
|
codeqlArgs.push(querySuitePath);
|
||||||
codeqlArgs.push(querySuitePath);
|
|
||||||
}
|
|
||||||
await runTool(cmd, codeqlArgs);
|
await runTool(cmd, codeqlArgs);
|
||||||
},
|
},
|
||||||
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, automationDetailsId) {
|
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, automationDetailsId) {
|
||||||
@@ -622,9 +613,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
|||||||
codeqlArgs.push("--sarif-category", automationDetailsId);
|
codeqlArgs.push("--sarif-category", automationDetailsId);
|
||||||
}
|
}
|
||||||
codeqlArgs.push(databasePath);
|
codeqlArgs.push(databasePath);
|
||||||
if (!(await util.codeQlVersionAbove(this, CODEQL_VERSION_CONFIG_FILES))) {
|
codeqlArgs.push(...querySuitePaths);
|
||||||
codeqlArgs.push(...querySuitePaths);
|
|
||||||
}
|
|
||||||
// capture stdout, which contains analysis summaries
|
// capture stdout, which contains analysis summaries
|
||||||
return await runTool(cmd, codeqlArgs);
|
return await runTool(cmd, codeqlArgs);
|
||||||
},
|
},
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
37
lib/config-utils.js
generated
37
lib/config-utils.js
generated
@@ -118,9 +118,11 @@ const builtinSuites = ["security-extended", "security-and-quality"];
|
|||||||
/**
|
/**
|
||||||
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
|
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
|
||||||
* Throws an error if suiteName is not a valid builtin suite.
|
* Throws an error if suiteName is not a valid builtin suite.
|
||||||
|
* May inject ML queries, and the return value will declare if this was done.
|
||||||
*/
|
*/
|
||||||
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureFlags, configFile) {
|
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureFlags, configFile) {
|
||||||
var _a;
|
var _a;
|
||||||
|
let injectedMlQueries = false;
|
||||||
const found = builtinSuites.find((suite) => suite === suiteName);
|
const found = builtinSuites.find((suite) => suite === suiteName);
|
||||||
if (!found) {
|
if (!found) {
|
||||||
throw new Error(getQueryUsesInvalid(configFile, suiteName));
|
throw new Error(getQueryUsesInvalid(configFile, suiteName));
|
||||||
@@ -128,18 +130,23 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suite
|
|||||||
// If we're running the JavaScript security-extended analysis (or a superset of it), the repo is
|
// If we're running the JavaScript security-extended analysis (or a superset of it), the repo is
|
||||||
// opted into the ML-powered queries beta, and a user hasn't already added the ML-powered query
|
// opted into the ML-powered queries beta, and a user hasn't already added the ML-powered query
|
||||||
// pack, then add the ML-powered query pack so that we run ML-powered queries.
|
// pack, then add the ML-powered query pack so that we run ML-powered queries.
|
||||||
if (languages.includes("javascript") &&
|
if (
|
||||||
|
// Disable ML-powered queries on Windows
|
||||||
|
process.platform !== "win32" &&
|
||||||
|
languages.includes("javascript") &&
|
||||||
(found === "security-extended" || found === "security-and-quality") &&
|
(found === "security-extended" || found === "security-and-quality") &&
|
||||||
!((_a = packs.javascript) === null || _a === void 0 ? void 0 : _a.some((pack) => pack.packName === util_1.ML_POWERED_JS_QUERIES_PACK.packName)) &&
|
!((_a = packs.javascript) === null || _a === void 0 ? void 0 : _a.some((pack) => pack.packName === util_1.ML_POWERED_JS_QUERIES_PACK_NAME)) &&
|
||||||
(await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled)) &&
|
(await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled)) &&
|
||||||
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES))) {
|
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES))) {
|
||||||
if (!packs.javascript) {
|
if (!packs.javascript) {
|
||||||
packs.javascript = [];
|
packs.javascript = [];
|
||||||
}
|
}
|
||||||
packs.javascript.push(util_1.ML_POWERED_JS_QUERIES_PACK);
|
packs.javascript.push(await (0, util_1.getMlPoweredJsQueriesPack)(codeQL));
|
||||||
|
injectedMlQueries = true;
|
||||||
}
|
}
|
||||||
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
|
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
|
||||||
await runResolveQueries(codeQL, resultMap, suites, undefined);
|
await runResolveQueries(codeQL, resultMap, suites, undefined);
|
||||||
|
return injectedMlQueries;
|
||||||
}
|
}
|
||||||
/**
|
/**
|
||||||
* Retrieve the set of queries at localQueryPath and add them to resultMap.
|
* Retrieve the set of queries at localQueryPath and add them to resultMap.
|
||||||
@@ -196,6 +203,11 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
|
|||||||
* parsing the 'uses' actions in the workflow file. So it can handle
|
* parsing the 'uses' actions in the workflow file. So it can handle
|
||||||
* local paths starting with './', or references to remote repos, or
|
* local paths starting with './', or references to remote repos, or
|
||||||
* a finite set of hardcoded terms for builtin suites.
|
* a finite set of hardcoded terms for builtin suites.
|
||||||
|
*
|
||||||
|
* This may inject ML queries into the packs to use, and the return value will
|
||||||
|
* declare if this was done.
|
||||||
|
*
|
||||||
|
* @returns whether or not we injected ML queries into the packs
|
||||||
*/
|
*/
|
||||||
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, featureFlags, logger, configFile) {
|
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, featureFlags, logger, configFile) {
|
||||||
queryUses = queryUses.trim();
|
queryUses = queryUses.trim();
|
||||||
@@ -205,15 +217,15 @@ async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, te
|
|||||||
// Check for the local path case before we start trying to parse the repository name
|
// Check for the local path case before we start trying to parse the repository name
|
||||||
if (queryUses.startsWith("./")) {
|
if (queryUses.startsWith("./")) {
|
||||||
await addLocalQueries(codeQL, resultMap, queryUses.slice(2), workspacePath, configFile);
|
await addLocalQueries(codeQL, resultMap, queryUses.slice(2), workspacePath, configFile);
|
||||||
return;
|
return false;
|
||||||
}
|
}
|
||||||
// Check for one of the builtin suites
|
// Check for one of the builtin suites
|
||||||
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
|
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
|
||||||
await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureFlags, configFile);
|
return await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureFlags, configFile);
|
||||||
return;
|
|
||||||
}
|
}
|
||||||
// Otherwise, must be a reference to another repo
|
// Otherwise, must be a reference to another repo
|
||||||
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
|
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
|
||||||
|
return false;
|
||||||
}
|
}
|
||||||
// Regex validating stars in paths or paths-ignore entries.
|
// Regex validating stars in paths or paths-ignore entries.
|
||||||
// The intention is to only allow ** to appear when immediately
|
// The intention is to only allow ** to appear when immediately
|
||||||
@@ -422,12 +434,15 @@ async function getLanguages(codeQL, languagesInput, repository, apiDetails, logg
|
|||||||
return parsedLanguages;
|
return parsedLanguages;
|
||||||
}
|
}
|
||||||
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureFlags, logger) {
|
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureFlags, logger) {
|
||||||
|
let injectedMlQueries = false;
|
||||||
queriesInput = queriesInput.trim();
|
queriesInput = queriesInput.trim();
|
||||||
// "+" means "don't override config file" - see shouldAddConfigFileQueries
|
// "+" means "don't override config file" - see shouldAddConfigFileQueries
|
||||||
queriesInput = queriesInput.replace(/^\+/, "");
|
queriesInput = queriesInput.replace(/^\+/, "");
|
||||||
for (const query of queriesInput.split(",")) {
|
for (const query of queriesInput.split(",")) {
|
||||||
await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
const didInject = await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||||
|
injectedMlQueries = injectedMlQueries || didInject;
|
||||||
}
|
}
|
||||||
|
return injectedMlQueries;
|
||||||
}
|
}
|
||||||
// Returns true if either no queries were provided in the workflow.
|
// Returns true if either no queries were provided in the workflow.
|
||||||
// or if the queries in the workflow were provided in "additive" mode,
|
// or if the queries in the workflow were provided in "additive" mode,
|
||||||
@@ -454,8 +469,9 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
|
|||||||
}
|
}
|
||||||
await addDefaultQueries(codeQL, languages, queries);
|
await addDefaultQueries(codeQL, languages, queries);
|
||||||
const packs = (_a = parsePacksFromInput(packsInput, languages)) !== null && _a !== void 0 ? _a : {};
|
const packs = (_a = parsePacksFromInput(packsInput, languages)) !== null && _a !== void 0 ? _a : {};
|
||||||
|
let injectedMlQueries = false;
|
||||||
if (queriesInput) {
|
if (queriesInput) {
|
||||||
await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
injectedMlQueries = await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||||
}
|
}
|
||||||
return {
|
return {
|
||||||
languages,
|
languages,
|
||||||
@@ -472,6 +488,7 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
|
|||||||
debugMode,
|
debugMode,
|
||||||
debugArtifactName,
|
debugArtifactName,
|
||||||
debugDatabaseName,
|
debugDatabaseName,
|
||||||
|
injectedMlQueries,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
exports.getDefaultConfig = getDefaultConfig;
|
exports.getDefaultConfig = getDefaultConfig;
|
||||||
@@ -524,8 +541,9 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
|
|||||||
// they should take precedence over the queries in the config file
|
// they should take precedence over the queries in the config file
|
||||||
// unless they're prefixed with "+", in which case they supplement those
|
// unless they're prefixed with "+", in which case they supplement those
|
||||||
// in the config file.
|
// in the config file.
|
||||||
|
let injectedMlQueries = false;
|
||||||
if (queriesInput) {
|
if (queriesInput) {
|
||||||
await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
injectedMlQueries = await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||||
}
|
}
|
||||||
if (shouldAddConfigFileQueries(queriesInput) &&
|
if (shouldAddConfigFileQueries(queriesInput) &&
|
||||||
QUERIES_PROPERTY in parsedYAML) {
|
QUERIES_PROPERTY in parsedYAML) {
|
||||||
@@ -578,6 +596,7 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
|
|||||||
debugMode,
|
debugMode,
|
||||||
debugArtifactName,
|
debugArtifactName,
|
||||||
debugDatabaseName,
|
debugDatabaseName,
|
||||||
|
injectedMlQueries,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
/**
|
/**
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
18
lib/config-utils.test.js
generated
18
lib/config-utils.test.js
generated
@@ -221,6 +221,7 @@ function mockListLanguages(languages) {
|
|||||||
debugMode: false,
|
debugMode: false,
|
||||||
debugArtifactName: "my-artifact",
|
debugArtifactName: "my-artifact",
|
||||||
debugDatabaseName: "my-db",
|
debugDatabaseName: "my-db",
|
||||||
|
injectedMlQueries: false,
|
||||||
};
|
};
|
||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
||||||
@@ -910,11 +911,20 @@ const mlPoweredQueriesMacro = ava_1.default.macro({
|
|||||||
? `${expectedVersionString} are`
|
? `${expectedVersionString} are`
|
||||||
: "aren't"} loaded for packs: ${packsInput}, queries: ${queriesInput} using CLI v${codeQLVersion} when feature flag is ${isMlPoweredQueriesFlagEnabled ? "enabled" : "disabled"}`,
|
: "aren't"} loaded for packs: ${packsInput}, queries: ${queriesInput} using CLI v${codeQLVersion} when feature flag is ${isMlPoweredQueriesFlagEnabled ? "enabled" : "disabled"}`,
|
||||||
});
|
});
|
||||||
// macro, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, versionString
|
// macro, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString
|
||||||
|
// Test that ML-powered queries aren't run on v2.7.4 of the CLI.
|
||||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, undefined, "security-extended", undefined);
|
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, undefined, "security-extended", undefined);
|
||||||
|
// Test that ML-powered queries aren't run when the feature flag is off.
|
||||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, undefined, "security-extended", undefined);
|
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, undefined, "security-extended", undefined);
|
||||||
|
// Test that ML-powered queries aren't run when the user hasn't specified that we should run the
|
||||||
|
// `security-extended` or `security-and-quality` query suite.
|
||||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, undefined, undefined);
|
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, undefined, undefined);
|
||||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-extended", "~0.1.0");
|
// Test that ML-powered queries are run on non-Windows platforms running `security-extended`.
|
||||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-and-quality", "~0.1.0");
|
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.1.0");
|
||||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "codeql/javascript-experimental-atm-queries@0.0.1", "security-and-quality", "0.0.1");
|
// Test that ML-powered queries are run on non-Windows platforms running `security-and-quality`.
|
||||||
|
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-and-quality", process.platform === "win32" ? undefined : "~0.1.0");
|
||||||
|
// Test that we don't inject an ML-powered query pack if the user has already specified one.
|
||||||
|
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "codeql/javascript-experimental-atm-queries@0.0.1", "security-and-quality", process.platform === "win32" ? undefined : "0.0.1");
|
||||||
|
// Test that the ~0.2.0 version of ML-powered queries is run on v2.8.4 of the CLI.
|
||||||
|
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.4", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.2.0");
|
||||||
//# sourceMappingURL=config-utils.test.js.map
|
//# sourceMappingURL=config-utils.test.js.map
|
||||||
File diff suppressed because one or more lines are too long
1
lib/database-upload.test.js
generated
1
lib/database-upload.test.js
generated
@@ -58,6 +58,7 @@ function getTestConfig(tmpDir) {
|
|||||||
debugMode: false,
|
debugMode: false,
|
||||||
debugArtifactName: util_1.DEFAULT_DEBUG_ARTIFACT_NAME,
|
debugArtifactName: util_1.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||||
debugDatabaseName: util_1.DEFAULT_DEBUG_DATABASE_NAME,
|
debugDatabaseName: util_1.DEFAULT_DEBUG_DATABASE_NAME,
|
||||||
|
injectedMlQueries: false,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
async function mockHttpRequests(databaseUploadStatusCode) {
|
async function mockHttpRequests(databaseUploadStatusCode) {
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
@@ -1,3 +1,3 @@
|
|||||||
{
|
{
|
||||||
"bundleVersion": "codeql-bundle-20220311"
|
"bundleVersion": "codeql-bundle-20220421"
|
||||||
}
|
}
|
||||||
|
|||||||
1
lib/testing-utils.js
generated
1
lib/testing-utils.js
generated
@@ -90,6 +90,7 @@ exports.setupTests = setupTests;
|
|||||||
function setupActionsVars(tempDir, toolsDir) {
|
function setupActionsVars(tempDir, toolsDir) {
|
||||||
process.env["RUNNER_TEMP"] = tempDir;
|
process.env["RUNNER_TEMP"] = tempDir;
|
||||||
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
|
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
|
||||||
|
process.env["GITHUB_WORKSPACE"] = tempDir;
|
||||||
}
|
}
|
||||||
exports.setupActionsVars = setupActionsVars;
|
exports.setupActionsVars = setupActionsVars;
|
||||||
function getRecordingLogger(messages) {
|
function getRecordingLogger(messages) {
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,wDAA0C;AAE1C,6CAA+B;AAE/B,wDAA0C;AAC1C,iDAAmC;AAEnC,iCAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAiB;IAC1C,MAAM,SAAS,GAAG,IAA2B,CAAC;IAE9C,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,o
CAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;AAC9C,CAAC;AAHD,4CAGC;AAOD,SAAgB,kBAAkB,CAAC,QAAyB;IAC1D,OAAO;QACL,KAAK,EAAE,CAAC,OAAe,EAAE,EAAE;YACzB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,IAAI,EAAE,CAAC,OAAe,EAAE,EAAE;YACxB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;YACzC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,OAAO,EAAE,CAAC,OAAuB,EAAE,EAAE;YACnC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,CAAC,CAAC;YAC5C,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,KAAK,EAAE,CAAC,OAAuB,EAAE,EAAE;YACjC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI;QACnB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAtBD,gDAsBC;AAED,0EAA0E;AAC1E,SAAgB,0BAA0B,CACxC,kBAA0B,EAC1B,QAAyC;IAEzC,kEAAkE;IAClE,MAAM,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IAExC,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IAEjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAClC,8DAA8D,CAC/D,CAAC;IACF,IAAI,kBAAkB,GAAG,GAAG,EAAE;QAC5B,QAAQ,CAAC,QAAQ,CAAC;YAChB,MAAM,EAAE,kBAAkB;YAC1B,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,8DAA8D;SACpE,CAAC,CAAC;KACJ;SAAM;QACL,QAAQ,CAAC,MAAM,CAAC,IAAI,gBAAS,CAAC,oBAAoB,EAAE,kBAAkB,CAAC,CAAC,CAAC;KAC1E;IAED,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;AAC5D,CAAC;AAxBD,gEAwBC"}
|
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,wDAA0C;AAE1C,6CAA+B;AAE/B,wDAA0C;AAC1C,iDAAmC;AAEnC,iCAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAiB;IAC1C,MAAM,SAAS,GAAG,IAA2B,CAAC;IAE9C,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,o
CAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;IAC5C,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,OAAO,CAAC;AAC5C,CAAC;AAJD,4CAIC;AAOD,SAAgB,kBAAkB,CAAC,QAAyB;IAC1D,OAAO;QACL,KAAK,EAAE,CAAC,OAAe,EAAE,EAAE;YACzB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,IAAI,EAAE,CAAC,OAAe,EAAE,EAAE;YACxB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;YACzC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,OAAO,EAAE,CAAC,OAAuB,EAAE,EAAE;YACnC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,CAAC,CAAC;YAC5C,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,KAAK,EAAE,CAAC,OAAuB,EAAE,EAAE;YACjC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI;QACnB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAtBD,gDAsBC;AAED,0EAA0E;AAC1E,SAAgB,0BAA0B,CACxC,kBAA0B,EAC1B,QAAyC;IAEzC,kEAAkE;IAClE,MAAM,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IAExC,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IAEjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAClC,8DAA8D,CAC/D,CAAC;IACF,IAAI,kBAAkB,GAAG,GAAG,EAAE;QAC5B,QAAQ,CAAC,QAAQ,CAAC;YAChB,MAAM,EAAE,kBAAkB;YAC1B,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,8DAA8D;SACpE,CAAC,CAAC;KACJ;SAAM;QACL,QAAQ,CAAC,MAAM,CAAC,IAAI,gBAAS,CAAC,oBAAoB,EAAE,kBAAkB,CAAC,CAAC,CAAC;KAC1E;IAED,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;AAC5D,CAAC;AAxBD,gEAwBC"}
|
||||||
1
lib/tracer-config.test.js
generated
1
lib/tracer-config.test.js
generated
@@ -47,6 +47,7 @@ function getTestConfig(tmpDir) {
|
|||||||
debugMode: false,
|
debugMode: false,
|
||||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||||
|
injectedMlQueries: false,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
// A very minimal setup
|
// A very minimal setup
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
23
lib/upload-lib.js
generated
23
lib/upload-lib.js
generated
@@ -95,7 +95,10 @@ async function uploadPayload(payload, repositoryNwo, apiDetails, logger) {
|
|||||||
// If in test mode we don't want to upload the results
|
// If in test mode we don't want to upload the results
|
||||||
const testMode = process.env["TEST_MODE"] === "true" || false;
|
const testMode = process.env["TEST_MODE"] === "true" || false;
|
||||||
if (testMode) {
|
if (testMode) {
|
||||||
logger.debug("In test mode. Results are not uploaded.");
|
const payloadSaveFile = path.join(actionsUtil.getTemporaryDirectory(), "payload.json");
|
||||||
|
logger.info(`In test mode. Results are not uploaded. Saving to ${payloadSaveFile}`);
|
||||||
|
logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
|
||||||
|
fs.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const client = api.getApiClient(apiDetails);
|
const client = api.getApiClient(apiDetails);
|
||||||
@@ -134,7 +137,7 @@ exports.findSarifFilesInDir = findSarifFilesInDir;
|
|||||||
// depending on what the path happens to refer to.
|
// depending on what the path happens to refer to.
|
||||||
// Returns true iff the upload occurred and succeeded
|
// Returns true iff the upload occurred and succeeded
|
||||||
async function uploadFromActions(sarifPath, gitHubVersion, apiDetails, logger) {
|
async function uploadFromActions(sarifPath, gitHubVersion, apiDetails, logger) {
|
||||||
return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getOptionalInput("category"), util.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), gitHubVersion, apiDetails, logger);
|
return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(actionsUtil.getRequiredInput("checkout_path")), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getOptionalInput("category"), util.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), gitHubVersion, apiDetails, logger);
|
||||||
}
|
}
|
||||||
exports.uploadFromActions = uploadFromActions;
|
exports.uploadFromActions = uploadFromActions;
|
||||||
// Uploads a single sarif file or a directory of sarif files
|
// Uploads a single sarif file or a directory of sarif files
|
||||||
@@ -319,23 +322,15 @@ async function waitForProcessing(repositoryNwo, sarifID, apiDetails, logger) {
|
|||||||
if (status === "complete") {
|
if (status === "complete") {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
else if (status === "pending") {
|
||||||
|
logger.debug("Analysis processing is still pending...");
|
||||||
|
}
|
||||||
else if (status === "failed") {
|
else if (status === "failed") {
|
||||||
throw new Error(`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`);
|
throw new Error(`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
if (util.isHTTPError(e)) {
|
logger.warning(`An error occurred checking the status of the delivery. ${e} It should still be processed in the background, but errors that occur during processing may not be reported.`);
|
||||||
switch (e.status) {
|
|
||||||
case 404:
|
|
||||||
logger.debug("Analysis is not found yet...");
|
|
||||||
break; // Note this breaks from the case statement, not the outer loop.
|
|
||||||
default:
|
|
||||||
throw e;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
throw e;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
|
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
|
||||||
}
|
}
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
55
lib/util.js
generated
55
lib/util.js
generated
@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.getMlPoweredJsQueriesStatus = exports.ML_POWERED_JS_QUERIES_PACK = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isGitHubGhesVersionBelow = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
|
exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isGitHubGhesVersionBelow = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const os = __importStar(require("os"));
|
const os = __importStar(require("os"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
@@ -545,24 +545,26 @@ function isGoodVersion(versionSpec) {
|
|||||||
return !BROKEN_VERSIONS.includes(versionSpec);
|
return !BROKEN_VERSIONS.includes(versionSpec);
|
||||||
}
|
}
|
||||||
exports.isGoodVersion = isGoodVersion;
|
exports.isGoodVersion = isGoodVersion;
|
||||||
|
exports.ML_POWERED_JS_QUERIES_PACK_NAME = "codeql/javascript-experimental-atm-queries";
|
||||||
/**
|
/**
|
||||||
* The ML-powered JS query pack to add to the analysis if a repo is opted into the ML-powered
|
* Gets the ML-powered JS query pack to add to the analysis if a repo is opted into the ML-powered
|
||||||
* queries beta.
|
* queries beta.
|
||||||
*/
|
*/
|
||||||
exports.ML_POWERED_JS_QUERIES_PACK = {
|
async function getMlPoweredJsQueriesPack(codeQL) {
|
||||||
packName: "codeql/javascript-experimental-atm-queries",
|
if (await codeQlVersionAbove(codeQL, "2.8.4")) {
|
||||||
version: "~0.1.0",
|
return { packName: exports.ML_POWERED_JS_QUERIES_PACK_NAME, version: "~0.2.0" };
|
||||||
};
|
}
|
||||||
|
return { packName: exports.ML_POWERED_JS_QUERIES_PACK_NAME, version: "~0.1.0" };
|
||||||
|
}
|
||||||
|
exports.getMlPoweredJsQueriesPack = getMlPoweredJsQueriesPack;
|
||||||
/**
|
/**
|
||||||
* Get information about ML-powered JS queries to populate status reports with.
|
* Get information about ML-powered JS queries to populate status reports with.
|
||||||
*
|
*
|
||||||
* This will be:
|
* This will be:
|
||||||
*
|
*
|
||||||
* - The version string if the analysis is using the ML-powered query pack that will be added to the
|
* - The version string if the analysis is using a single version of the ML-powered query pack.
|
||||||
* analysis if the repo is opted into the ML-powered queries beta, i.e.
|
* - "latest" if the version string of the ML-powered query pack is undefined. This is unlikely to
|
||||||
* {@link ML_POWERED_JS_QUERIES_PACK.version}. If the version string
|
* occur in practice (see comment below).
|
||||||
* {@link ML_POWERED_JS_QUERIES_PACK.version} is undefined, then the status report string will be
|
|
||||||
* "latest", however this shouldn't occur in practice (see comment below).
|
|
||||||
* - "false" if the analysis won't run any ML-powered JS queries.
|
* - "false" if the analysis won't run any ML-powered JS queries.
|
||||||
* - "other" in all other cases.
|
* - "other" in all other cases.
|
||||||
*
|
*
|
||||||
@@ -572,30 +574,25 @@ exports.ML_POWERED_JS_QUERIES_PACK = {
|
|||||||
* version of the CodeQL Action. For instance, we might want to compare the `~0.1.0` and `~0.0.2`
|
* version of the CodeQL Action. For instance, we might want to compare the `~0.1.0` and `~0.0.2`
|
||||||
* version strings.
|
* version strings.
|
||||||
*
|
*
|
||||||
* We restrict the set of strings we report here by excluding other version strings and combinations
|
|
||||||
* of version strings. We do this to limit the cardinality of the ML-powered JS queries status
|
|
||||||
* report field, since some platforms that ingest this status report bill based on the cardinality
|
|
||||||
* of its fields.
|
|
||||||
*
|
|
||||||
* This function lives here rather than in `init-action.ts` so it's easier to test, since tests for
|
* This function lives here rather than in `init-action.ts` so it's easier to test, since tests for
|
||||||
* `init-action.ts` would each need to live in their own file. See `analyze-action-env.ts` for an
|
* `init-action.ts` would each need to live in their own file. See `analyze-action-env.ts` for an
|
||||||
* explanation as to why this is.
|
* explanation as to why this is.
|
||||||
*/
|
*/
|
||||||
function getMlPoweredJsQueriesStatus(config) {
|
function getMlPoweredJsQueriesStatus(config) {
|
||||||
const mlPoweredJsQueryPacks = (config.packs.javascript || []).filter((pack) => pack.packName === exports.ML_POWERED_JS_QUERIES_PACK.packName);
|
const mlPoweredJsQueryPacks = (config.packs.javascript || []).filter((pack) => pack.packName === exports.ML_POWERED_JS_QUERIES_PACK_NAME);
|
||||||
if (mlPoweredJsQueryPacks.length === 0) {
|
switch (mlPoweredJsQueryPacks.length) {
|
||||||
return "false";
|
case 1:
|
||||||
|
// We should always specify an explicit version string in `getMlPoweredJsQueriesPack`,
|
||||||
|
// otherwise we won't be able to make changes to the pack unless those changes are compatible
|
||||||
|
// with each version of the CodeQL Action. Therefore in practice we should only hit the
|
||||||
|
// `latest` case here when customers have explicitly added the ML-powered query pack to their
|
||||||
|
// CodeQL config.
|
||||||
|
return mlPoweredJsQueryPacks[0].version || "latest";
|
||||||
|
case 0:
|
||||||
|
return "false";
|
||||||
|
default:
|
||||||
|
return "other";
|
||||||
}
|
}
|
||||||
const firstVersionString = mlPoweredJsQueryPacks[0].version;
|
|
||||||
if (mlPoweredJsQueryPacks.length === 1 &&
|
|
||||||
exports.ML_POWERED_JS_QUERIES_PACK.version === firstVersionString) {
|
|
||||||
// We should always specify an explicit version string in `ML_POWERED_JS_QUERIES_PACK`,
|
|
||||||
// otherwise we won't be able to make changes to the pack unless those changes are compatible
|
|
||||||
// with each version of the CodeQL Action. Therefore in practice, we should never hit the
|
|
||||||
// `latest` case here.
|
|
||||||
return exports.ML_POWERED_JS_QUERIES_PACK.version || "latest";
|
|
||||||
}
|
|
||||||
return "other";
|
|
||||||
}
|
}
|
||||||
exports.getMlPoweredJsQueriesStatus = getMlPoweredJsQueriesStatus;
|
exports.getMlPoweredJsQueriesStatus = getMlPoweredJsQueriesStatus;
|
||||||
//# sourceMappingURL=util.js.map
|
//# sourceMappingURL=util.js.map
|
||||||
File diff suppressed because one or more lines are too long
44
lib/util.test.js
generated
44
lib/util.test.js
generated
@@ -205,32 +205,43 @@ async function mockStdInForAuthExpectError(t, mockLogger, ...text) {
|
|||||||
await t.throwsAsync(async () => util.getGitHubAuth(mockLogger, undefined, true, stdin));
|
await t.throwsAsync(async () => util.getGitHubAuth(mockLogger, undefined, true, stdin));
|
||||||
}
|
}
|
||||||
const ML_POWERED_JS_STATUS_TESTS = [
|
const ML_POWERED_JS_STATUS_TESTS = [
|
||||||
|
// If no packs are loaded, status is false.
|
||||||
[[], "false"],
|
[[], "false"],
|
||||||
|
// If another pack is loaded but not the ML-powered query pack, status is false.
|
||||||
[[{ packName: "someOtherPack" }], "false"],
|
[[{ packName: "someOtherPack" }], "false"],
|
||||||
|
// If the ML-powered query pack is loaded with a specific version, status is that version.
|
||||||
[
|
[
|
||||||
[{ packName: "someOtherPack" }, util.ML_POWERED_JS_QUERIES_PACK],
|
[{ packName: util.ML_POWERED_JS_QUERIES_PACK_NAME, version: "~0.1.0" }],
|
||||||
util.ML_POWERED_JS_QUERIES_PACK.version,
|
"~0.1.0",
|
||||||
],
|
|
||||||
[[util.ML_POWERED_JS_QUERIES_PACK], util.ML_POWERED_JS_QUERIES_PACK.version],
|
|
||||||
[[{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName }], "other"],
|
|
||||||
[
|
|
||||||
[{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName, version: "~0.0.1" }],
|
|
||||||
"other",
|
|
||||||
],
|
|
||||||
[
|
|
||||||
[
|
|
||||||
{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName, version: "0.0.1" },
|
|
||||||
{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName, version: "0.0.2" },
|
|
||||||
],
|
|
||||||
"other",
|
|
||||||
],
|
],
|
||||||
|
// If the ML-powered query pack is loaded with a specific version and another pack is loaded, the
|
||||||
|
// status is the version of the ML-powered query pack.
|
||||||
[
|
[
|
||||||
[
|
[
|
||||||
{ packName: "someOtherPack" },
|
{ packName: "someOtherPack" },
|
||||||
{ packName: util.ML_POWERED_JS_QUERIES_PACK.packName },
|
{ packName: util.ML_POWERED_JS_QUERIES_PACK_NAME, version: "~0.1.0" },
|
||||||
|
],
|
||||||
|
"~0.1.0",
|
||||||
|
],
|
||||||
|
// If the ML-powered query pack is loaded without a version, the status is "latest".
|
||||||
|
[[{ packName: util.ML_POWERED_JS_QUERIES_PACK_NAME }], "latest"],
|
||||||
|
// If the ML-powered query pack is loaded with two different versions, the status is "other".
|
||||||
|
[
|
||||||
|
[
|
||||||
|
{ packName: util.ML_POWERED_JS_QUERIES_PACK_NAME, version: "0.0.1" },
|
||||||
|
{ packName: util.ML_POWERED_JS_QUERIES_PACK_NAME, version: "0.0.2" },
|
||||||
],
|
],
|
||||||
"other",
|
"other",
|
||||||
],
|
],
|
||||||
|
// If the ML-powered query pack is loaded with no specific version, and another pack is loaded,
|
||||||
|
// the status is "latest".
|
||||||
|
[
|
||||||
|
[
|
||||||
|
{ packName: "someOtherPack" },
|
||||||
|
{ packName: util.ML_POWERED_JS_QUERIES_PACK_NAME },
|
||||||
|
],
|
||||||
|
"latest",
|
||||||
|
],
|
||||||
];
|
];
|
||||||
for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
|
for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
|
||||||
const packDescriptions = `[${packs
|
const packDescriptions = `[${packs
|
||||||
@@ -257,6 +268,7 @@ for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
|
|||||||
debugMode: false,
|
debugMode: false,
|
||||||
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||||
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||||
|
injectedMlQueries: false,
|
||||||
};
|
};
|
||||||
t.is(util.getMlPoweredJsQueriesStatus(config), expectedStatus);
|
t.is(util.getMlPoweredJsQueriesStatus(config), expectedStatus);
|
||||||
});
|
});
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
92
node_modules/.package-lock.json
generated
vendored
92
node_modules/.package-lock.json
generated
vendored
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "codeql",
|
"name": "codeql",
|
||||||
"version": "1.1.6",
|
"version": "1.1.9",
|
||||||
"lockfileVersion": 2,
|
"lockfileVersion": 2,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
@@ -893,6 +893,14 @@
|
|||||||
"node": ">=8"
|
"node": ">=8"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/array-uniq": {
|
||||||
|
"version": "1.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/array-uniq/-/array-uniq-1.0.3.tgz",
|
||||||
|
"integrity": "sha1-r2rId6Jcx/dOBYiUdThY39sk/bY=",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/array.prototype.flat": {
|
"node_modules/array.prototype.flat": {
|
||||||
"version": "1.2.4",
|
"version": "1.2.4",
|
||||||
"resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.4.tgz",
|
"resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.4.tgz",
|
||||||
@@ -2788,10 +2796,40 @@
|
|||||||
"loc": "dist/cli.js"
|
"loc": "dist/cli.js"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/github-linguist/node_modules/array-union": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz",
|
||||||
|
"integrity": "sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=",
|
||||||
|
"dependencies": {
|
||||||
|
"array-uniq": "^1.0.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/github-linguist/node_modules/commander": {
|
"node_modules/github-linguist/node_modules/commander": {
|
||||||
"version": "2.20.3",
|
"version": "2.20.3",
|
||||||
"integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="
|
"integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="
|
||||||
},
|
},
|
||||||
|
"node_modules/github-linguist/node_modules/glob": {
|
||||||
|
"version": "7.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz",
|
||||||
|
"integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==",
|
||||||
|
"dependencies": {
|
||||||
|
"fs.realpath": "^1.0.0",
|
||||||
|
"inflight": "^1.0.4",
|
||||||
|
"inherits": "2",
|
||||||
|
"minimatch": "^3.0.4",
|
||||||
|
"once": "^1.3.0",
|
||||||
|
"path-is-absolute": "^1.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "*"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/isaacs"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/github-linguist/node_modules/globby": {
|
"node_modules/github-linguist/node_modules/globby": {
|
||||||
"version": "6.1.0",
|
"version": "6.1.0",
|
||||||
"integrity": "sha1-9abXDoOV4hyFj7BInWTfAkJNUGw=",
|
"integrity": "sha1-9abXDoOV4hyFj7BInWTfAkJNUGw=",
|
||||||
@@ -2814,18 +2852,19 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/glob": {
|
"node_modules/glob": {
|
||||||
"version": "7.1.7",
|
"version": "8.0.1",
|
||||||
"integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==",
|
"resolved": "https://registry.npmjs.org/glob/-/glob-8.0.1.tgz",
|
||||||
|
"integrity": "sha512-cF7FYZZ47YzmCu7dDy50xSRRfO3ErRfrXuLZcNIuyiJEco0XSrGtuilG19L5xp3NcwTx7Gn+X6Tv3fmsUPTbow==",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"fs.realpath": "^1.0.0",
|
"fs.realpath": "^1.0.0",
|
||||||
"inflight": "^1.0.4",
|
"inflight": "^1.0.4",
|
||||||
"inherits": "2",
|
"inherits": "2",
|
||||||
"minimatch": "^3.0.4",
|
"minimatch": "^5.0.1",
|
||||||
"once": "^1.3.0",
|
"once": "^1.3.0",
|
||||||
"path-is-absolute": "^1.0.0"
|
"path-is-absolute": "^1.0.0"
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": "*"
|
"node": ">=12"
|
||||||
},
|
},
|
||||||
"funding": {
|
"funding": {
|
||||||
"url": "https://github.com/sponsors/isaacs"
|
"url": "https://github.com/sponsors/isaacs"
|
||||||
@@ -2842,6 +2881,25 @@
|
|||||||
"node": ">= 6"
|
"node": ">= 6"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/glob/node_modules/brace-expansion": {
|
||||||
|
"version": "2.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
|
||||||
|
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
|
||||||
|
"dependencies": {
|
||||||
|
"balanced-match": "^1.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/glob/node_modules/minimatch": {
|
||||||
|
"version": "5.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz",
|
||||||
|
"integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==",
|
||||||
|
"dependencies": {
|
||||||
|
"brace-expansion": "^2.0.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/globals": {
|
"node_modules/globals": {
|
||||||
"version": "13.10.0",
|
"version": "13.10.0",
|
||||||
"resolved": "https://registry.npmjs.org/globals/-/globals-13.10.0.tgz",
|
"resolved": "https://registry.npmjs.org/globals/-/globals-13.10.0.tgz",
|
||||||
@@ -3593,8 +3651,9 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/minimist": {
|
"node_modules/minimist": {
|
||||||
"version": "1.2.5",
|
"version": "1.2.6",
|
||||||
"integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
|
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
|
||||||
|
"integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
"node_modules/ms": {
|
"node_modules/ms": {
|
||||||
@@ -4448,6 +4507,25 @@
|
|||||||
"url": "https://github.com/sponsors/isaacs"
|
"url": "https://github.com/sponsors/isaacs"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/rimraf/node_modules/glob": {
|
||||||
|
"version": "7.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz",
|
||||||
|
"integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==",
|
||||||
|
"dependencies": {
|
||||||
|
"fs.realpath": "^1.0.0",
|
||||||
|
"inflight": "^1.0.4",
|
||||||
|
"inherits": "2",
|
||||||
|
"minimatch": "^3.0.4",
|
||||||
|
"once": "^1.3.0",
|
||||||
|
"path-is-absolute": "^1.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "*"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/isaacs"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/run-parallel": {
|
"node_modules/run-parallel": {
|
||||||
"version": "1.1.9",
|
"version": "1.1.9",
|
||||||
"integrity": "sha512-DEqnSRTDw/Tc3FXf49zedI638Z9onwUotBMiUFKmrO2sdFKIbXamXGQ3Axd4qgphxKB4kw/qP1w5kTxnfU1B9Q=="
|
"integrity": "sha512-DEqnSRTDw/Tc3FXf49zedI638Z9onwUotBMiUFKmrO2sdFKIbXamXGQ3Axd4qgphxKB4kw/qP1w5kTxnfU1B9Q=="
|
||||||
|
|||||||
62
node_modules/array-uniq/index.js
generated
vendored
Normal file
62
node_modules/array-uniq/index.js
generated
vendored
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
// there's 3 implementations written in increasing order of efficiency
|
||||||
|
|
||||||
|
// 1 - no Set type is defined
|
||||||
|
function uniqNoSet(arr) {
|
||||||
|
var ret = [];
|
||||||
|
|
||||||
|
for (var i = 0; i < arr.length; i++) {
|
||||||
|
if (ret.indexOf(arr[i]) === -1) {
|
||||||
|
ret.push(arr[i]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2 - a simple Set type is defined
|
||||||
|
function uniqSet(arr) {
|
||||||
|
var seen = new Set();
|
||||||
|
return arr.filter(function (el) {
|
||||||
|
if (!seen.has(el)) {
|
||||||
|
seen.add(el);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3 - a standard Set type is defined and it has a forEach method
|
||||||
|
function uniqSetWithForEach(arr) {
|
||||||
|
var ret = [];
|
||||||
|
|
||||||
|
(new Set(arr)).forEach(function (el) {
|
||||||
|
ret.push(el);
|
||||||
|
});
|
||||||
|
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
|
|
||||||
|
// V8 currently has a broken implementation
|
||||||
|
// https://github.com/joyent/node/issues/8449
|
||||||
|
function doesForEachActuallyWork() {
|
||||||
|
var ret = false;
|
||||||
|
|
||||||
|
(new Set([true])).forEach(function (el) {
|
||||||
|
ret = el;
|
||||||
|
});
|
||||||
|
|
||||||
|
return ret === true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if ('Set' in global) {
|
||||||
|
if (typeof Set.prototype.forEach === 'function' && doesForEachActuallyWork()) {
|
||||||
|
module.exports = uniqSetWithForEach;
|
||||||
|
} else {
|
||||||
|
module.exports = uniqSet;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
module.exports = uniqNoSet;
|
||||||
|
}
|
||||||
21
node_modules/array-uniq/license
generated
vendored
Normal file
21
node_modules/array-uniq/license
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
37
node_modules/array-uniq/package.json
generated
vendored
Normal file
37
node_modules/array-uniq/package.json
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
{
|
||||||
|
"name": "array-uniq",
|
||||||
|
"version": "1.0.3",
|
||||||
|
"description": "Create an array without duplicates",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "sindresorhus/array-uniq",
|
||||||
|
"author": {
|
||||||
|
"name": "Sindre Sorhus",
|
||||||
|
"email": "sindresorhus@gmail.com",
|
||||||
|
"url": "sindresorhus.com"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "xo && ava"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"index.js"
|
||||||
|
],
|
||||||
|
"keywords": [
|
||||||
|
"array",
|
||||||
|
"arr",
|
||||||
|
"set",
|
||||||
|
"uniq",
|
||||||
|
"unique",
|
||||||
|
"es6",
|
||||||
|
"duplicate",
|
||||||
|
"remove"
|
||||||
|
],
|
||||||
|
"devDependencies": {
|
||||||
|
"ava": "*",
|
||||||
|
"es6-set": "^0.1.0",
|
||||||
|
"require-uncached": "^1.0.2",
|
||||||
|
"xo": "*"
|
||||||
|
}
|
||||||
|
}
|
||||||
30
node_modules/array-uniq/readme.md
generated
vendored
Normal file
30
node_modules/array-uniq/readme.md
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
# array-uniq [](https://travis-ci.org/sindresorhus/array-uniq)
|
||||||
|
|
||||||
|
> Create an array without duplicates
|
||||||
|
|
||||||
|
It's already pretty fast, but will be much faster when [Set](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set) becomes available in V8 (especially with large arrays).
|
||||||
|
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```
|
||||||
|
$ npm install --save array-uniq
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
const arrayUniq = require('array-uniq');
|
||||||
|
|
||||||
|
arrayUniq([1, 1, 2, 3, 3]);
|
||||||
|
//=> [1, 2, 3]
|
||||||
|
|
||||||
|
arrayUniq(['foo', 'foo', 'bar', 'foo']);
|
||||||
|
//=> ['foo', 'bar']
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT © [Sindre Sorhus](https://sindresorhus.com)
|
||||||
6
node_modules/github-linguist/node_modules/array-union/index.js
generated
vendored
Normal file
6
node_modules/github-linguist/node_modules/array-union/index.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
'use strict';
|
||||||
|
var arrayUniq = require('array-uniq');
|
||||||
|
|
||||||
|
module.exports = function () {
|
||||||
|
return arrayUniq([].concat.apply([], arguments));
|
||||||
|
};
|
||||||
21
node_modules/github-linguist/node_modules/array-union/license
generated
vendored
Normal file
21
node_modules/github-linguist/node_modules/array-union/license
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
40
node_modules/github-linguist/node_modules/array-union/package.json
generated
vendored
Normal file
40
node_modules/github-linguist/node_modules/array-union/package.json
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
{
|
||||||
|
"name": "array-union",
|
||||||
|
"version": "1.0.2",
|
||||||
|
"description": "Create an array of unique values, in order, from the input arrays",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": "sindresorhus/array-union",
|
||||||
|
"author": {
|
||||||
|
"name": "Sindre Sorhus",
|
||||||
|
"email": "sindresorhus@gmail.com",
|
||||||
|
"url": "sindresorhus.com"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "xo && ava"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"index.js"
|
||||||
|
],
|
||||||
|
"keywords": [
|
||||||
|
"array",
|
||||||
|
"arr",
|
||||||
|
"set",
|
||||||
|
"uniq",
|
||||||
|
"unique",
|
||||||
|
"duplicate",
|
||||||
|
"remove",
|
||||||
|
"union",
|
||||||
|
"combine",
|
||||||
|
"merge"
|
||||||
|
],
|
||||||
|
"dependencies": {
|
||||||
|
"array-uniq": "^1.0.1"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"ava": "*",
|
||||||
|
"xo": "*"
|
||||||
|
}
|
||||||
|
}
|
||||||
28
node_modules/github-linguist/node_modules/array-union/readme.md
generated
vendored
Normal file
28
node_modules/github-linguist/node_modules/array-union/readme.md
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
# array-union [](https://travis-ci.org/sindresorhus/array-union)
|
||||||
|
|
||||||
|
> Create an array of unique values, in order, from the input arrays
|
||||||
|
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```
|
||||||
|
$ npm install --save array-union
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
const arrayUnion = require('array-union');
|
||||||
|
|
||||||
|
arrayUnion([1, 1, 2, 3], [2, 3]);
|
||||||
|
//=> [1, 2, 3]
|
||||||
|
|
||||||
|
arrayUnion(['foo', 'foo', 'bar'], ['foo']);
|
||||||
|
//=> ['foo', 'bar']
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT © [Sindre Sorhus](https://sindresorhus.com)
|
||||||
21
node_modules/github-linguist/node_modules/glob/LICENSE
generated
vendored
Normal file
21
node_modules/github-linguist/node_modules/glob/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
The ISC License
|
||||||
|
|
||||||
|
Copyright (c) Isaac Z. Schlueter and Contributors
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and/or distribute this software for any
|
||||||
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
|
copyright notice and this permission notice appear in all copies.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||||
|
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
|
||||||
|
## Glob Logo
|
||||||
|
|
||||||
|
Glob's logo created by Tanya Brassie <http://tanyabrassie.com/>, licensed
|
||||||
|
under a Creative Commons Attribution-ShareAlike 4.0 International License
|
||||||
|
https://creativecommons.org/licenses/by-sa/4.0/
|
||||||
378
node_modules/github-linguist/node_modules/glob/README.md
generated
vendored
Normal file
378
node_modules/github-linguist/node_modules/glob/README.md
generated
vendored
Normal file
@@ -0,0 +1,378 @@
|
|||||||
|
# Glob
|
||||||
|
|
||||||
|
Match files using the patterns the shell uses, like stars and stuff.
|
||||||
|
|
||||||
|
[](https://travis-ci.org/isaacs/node-glob/) [](https://ci.appveyor.com/project/isaacs/node-glob) [](https://coveralls.io/github/isaacs/node-glob?branch=master)
|
||||||
|
|
||||||
|
This is a glob implementation in JavaScript. It uses the `minimatch`
|
||||||
|
library to do its matching.
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
Install with npm
|
||||||
|
|
||||||
|
```
|
||||||
|
npm i glob
|
||||||
|
```
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var glob = require("glob")
|
||||||
|
|
||||||
|
// options is optional
|
||||||
|
glob("**/*.js", options, function (er, files) {
|
||||||
|
// files is an array of filenames.
|
||||||
|
// If the `nonull` option is set, and nothing
|
||||||
|
// was found, then files is ["**/*.js"]
|
||||||
|
// er is an error object or null.
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## Glob Primer
|
||||||
|
|
||||||
|
"Globs" are the patterns you type when you do stuff like `ls *.js` on
|
||||||
|
the command line, or put `build/*` in a `.gitignore` file.
|
||||||
|
|
||||||
|
Before parsing the path part patterns, braced sections are expanded
|
||||||
|
into a set. Braced sections start with `{` and end with `}`, with any
|
||||||
|
number of comma-delimited sections within. Braced sections may contain
|
||||||
|
slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`.
|
||||||
|
|
||||||
|
The following characters have special magic meaning when used in a
|
||||||
|
path portion:
|
||||||
|
|
||||||
|
* `*` Matches 0 or more characters in a single path portion
|
||||||
|
* `?` Matches 1 character
|
||||||
|
* `[...]` Matches a range of characters, similar to a RegExp range.
|
||||||
|
If the first character of the range is `!` or `^` then it matches
|
||||||
|
any character not in the range.
|
||||||
|
* `!(pattern|pattern|pattern)` Matches anything that does not match
|
||||||
|
any of the patterns provided.
|
||||||
|
* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the
|
||||||
|
patterns provided.
|
||||||
|
* `+(pattern|pattern|pattern)` Matches one or more occurrences of the
|
||||||
|
patterns provided.
|
||||||
|
* `*(a|b|c)` Matches zero or more occurrences of the patterns provided
|
||||||
|
* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns
|
||||||
|
provided
|
||||||
|
* `**` If a "globstar" is alone in a path portion, then it matches
|
||||||
|
zero or more directories and subdirectories searching for matches.
|
||||||
|
It does not crawl symlinked directories.
|
||||||
|
|
||||||
|
### Dots
|
||||||
|
|
||||||
|
If a file or directory path portion has a `.` as the first character,
|
||||||
|
then it will not match any glob pattern unless that pattern's
|
||||||
|
corresponding path part also has a `.` as its first character.
|
||||||
|
|
||||||
|
For example, the pattern `a/.*/c` would match the file at `a/.b/c`.
|
||||||
|
However the pattern `a/*/c` would not, because `*` does not start with
|
||||||
|
a dot character.
|
||||||
|
|
||||||
|
You can make glob treat dots as normal characters by setting
|
||||||
|
`dot:true` in the options.
|
||||||
|
|
||||||
|
### Basename Matching
|
||||||
|
|
||||||
|
If you set `matchBase:true` in the options, and the pattern has no
|
||||||
|
slashes in it, then it will seek for any file anywhere in the tree
|
||||||
|
with a matching basename. For example, `*.js` would match
|
||||||
|
`test/simple/basic.js`.
|
||||||
|
|
||||||
|
### Empty Sets
|
||||||
|
|
||||||
|
If no matching files are found, then an empty array is returned. This
|
||||||
|
differs from the shell, where the pattern itself is returned. For
|
||||||
|
example:
|
||||||
|
|
||||||
|
$ echo a*s*d*f
|
||||||
|
a*s*d*f
|
||||||
|
|
||||||
|
To get the bash-style behavior, set the `nonull:true` in the options.
|
||||||
|
|
||||||
|
### See Also:
|
||||||
|
|
||||||
|
* `man sh`
|
||||||
|
* `man bash` (Search for "Pattern Matching")
|
||||||
|
* `man 3 fnmatch`
|
||||||
|
* `man 5 gitignore`
|
||||||
|
* [minimatch documentation](https://github.com/isaacs/minimatch)
|
||||||
|
|
||||||
|
## glob.hasMagic(pattern, [options])
|
||||||
|
|
||||||
|
Returns `true` if there are any special characters in the pattern, and
|
||||||
|
`false` otherwise.
|
||||||
|
|
||||||
|
Note that the options affect the results. If `noext:true` is set in
|
||||||
|
the options object, then `+(a|b)` will not be considered a magic
|
||||||
|
pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}`
|
||||||
|
then that is considered magical, unless `nobrace:true` is set in the
|
||||||
|
options.
|
||||||
|
|
||||||
|
## glob(pattern, [options], cb)
|
||||||
|
|
||||||
|
* `pattern` `{String}` Pattern to be matched
|
||||||
|
* `options` `{Object}`
|
||||||
|
* `cb` `{Function}`
|
||||||
|
* `err` `{Error | null}`
|
||||||
|
* `matches` `{Array<String>}` filenames found matching the pattern
|
||||||
|
|
||||||
|
Perform an asynchronous glob search.
|
||||||
|
|
||||||
|
## glob.sync(pattern, [options])
|
||||||
|
|
||||||
|
* `pattern` `{String}` Pattern to be matched
|
||||||
|
* `options` `{Object}`
|
||||||
|
* return: `{Array<String>}` filenames found matching the pattern
|
||||||
|
|
||||||
|
Perform a synchronous glob search.
|
||||||
|
|
||||||
|
## Class: glob.Glob
|
||||||
|
|
||||||
|
Create a Glob object by instantiating the `glob.Glob` class.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var Glob = require("glob").Glob
|
||||||
|
var mg = new Glob(pattern, options, cb)
|
||||||
|
```
|
||||||
|
|
||||||
|
It's an EventEmitter, and starts walking the filesystem to find matches
|
||||||
|
immediately.
|
||||||
|
|
||||||
|
### new glob.Glob(pattern, [options], [cb])
|
||||||
|
|
||||||
|
* `pattern` `{String}` pattern to search for
|
||||||
|
* `options` `{Object}`
|
||||||
|
* `cb` `{Function}` Called when an error occurs, or matches are found
|
||||||
|
* `err` `{Error | null}`
|
||||||
|
* `matches` `{Array<String>}` filenames found matching the pattern
|
||||||
|
|
||||||
|
Note that if the `sync` flag is set in the options, then matches will
|
||||||
|
be immediately available on the `g.found` member.
|
||||||
|
|
||||||
|
### Properties
|
||||||
|
|
||||||
|
* `minimatch` The minimatch object that the glob uses.
|
||||||
|
* `options` The options object passed in.
|
||||||
|
* `aborted` Boolean which is set to true when calling `abort()`. There
|
||||||
|
is no way at this time to continue a glob search after aborting, but
|
||||||
|
you can re-use the statCache to avoid having to duplicate syscalls.
|
||||||
|
* `cache` Convenience object. Each field has the following possible
|
||||||
|
values:
|
||||||
|
* `false` - Path does not exist
|
||||||
|
* `true` - Path exists
|
||||||
|
* `'FILE'` - Path exists, and is not a directory
|
||||||
|
* `'DIR'` - Path exists, and is a directory
|
||||||
|
* `[file, entries, ...]` - Path exists, is a directory, and the
|
||||||
|
array value is the results of `fs.readdir`
|
||||||
|
* `statCache` Cache of `fs.stat` results, to prevent statting the same
|
||||||
|
path multiple times.
|
||||||
|
* `symlinks` A record of which paths are symbolic links, which is
|
||||||
|
relevant in resolving `**` patterns.
|
||||||
|
* `realpathCache` An optional object which is passed to `fs.realpath`
|
||||||
|
to minimize unnecessary syscalls. It is stored on the instantiated
|
||||||
|
Glob object, and may be re-used.
|
||||||
|
|
||||||
|
### Events
|
||||||
|
|
||||||
|
* `end` When the matching is finished, this is emitted with all the
|
||||||
|
matches found. If the `nonull` option is set, and no match was found,
|
||||||
|
then the `matches` list contains the original pattern. The matches
|
||||||
|
are sorted, unless the `nosort` flag is set.
|
||||||
|
* `match` Every time a match is found, this is emitted with the specific
|
||||||
|
thing that matched. It is not deduplicated or resolved to a realpath.
|
||||||
|
* `error` Emitted when an unexpected error is encountered, or whenever
|
||||||
|
any fs error occurs if `options.strict` is set.
|
||||||
|
* `abort` When `abort()` is called, this event is raised.
|
||||||
|
|
||||||
|
### Methods
|
||||||
|
|
||||||
|
* `pause` Temporarily stop the search
|
||||||
|
* `resume` Resume the search
|
||||||
|
* `abort` Stop the search forever
|
||||||
|
|
||||||
|
### Options
|
||||||
|
|
||||||
|
All the options that can be passed to Minimatch can also be passed to
|
||||||
|
Glob to change pattern matching behavior. Also, some have been added,
|
||||||
|
or have glob-specific ramifications.
|
||||||
|
|
||||||
|
All options are false by default, unless otherwise noted.
|
||||||
|
|
||||||
|
All options are added to the Glob object, as well.
|
||||||
|
|
||||||
|
If you are running many `glob` operations, you can pass a Glob object
|
||||||
|
as the `options` argument to a subsequent operation to shortcut some
|
||||||
|
`stat` and `readdir` calls. At the very least, you may pass in shared
|
||||||
|
`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that
|
||||||
|
parallel glob operations will be sped up by sharing information about
|
||||||
|
the filesystem.
|
||||||
|
|
||||||
|
* `cwd` The current working directory in which to search. Defaults
|
||||||
|
to `process.cwd()`.
|
||||||
|
* `root` The place where patterns starting with `/` will be mounted
|
||||||
|
onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix
|
||||||
|
systems, and `C:\` or some such on Windows.)
|
||||||
|
* `dot` Include `.dot` files in normal matches and `globstar` matches.
|
||||||
|
Note that an explicit dot in a portion of the pattern will always
|
||||||
|
match dot files.
|
||||||
|
* `nomount` By default, a pattern starting with a forward-slash will be
|
||||||
|
"mounted" onto the root setting, so that a valid filesystem path is
|
||||||
|
returned. Set this flag to disable that behavior.
|
||||||
|
* `mark` Add a `/` character to directory matches. Note that this
|
||||||
|
requires additional stat calls.
|
||||||
|
* `nosort` Don't sort the results.
|
||||||
|
* `stat` Set to true to stat *all* results. This reduces performance
|
||||||
|
somewhat, and is completely unnecessary, unless `readdir` is presumed
|
||||||
|
to be an untrustworthy indicator of file existence.
|
||||||
|
* `silent` When an unusual error is encountered when attempting to
|
||||||
|
read a directory, a warning will be printed to stderr. Set the
|
||||||
|
`silent` option to true to suppress these warnings.
|
||||||
|
* `strict` When an unusual error is encountered when attempting to
|
||||||
|
read a directory, the process will just continue on in search of
|
||||||
|
other matches. Set the `strict` option to raise an error in these
|
||||||
|
cases.
|
||||||
|
* `cache` See `cache` property above. Pass in a previously generated
|
||||||
|
cache object to save some fs calls.
|
||||||
|
* `statCache` A cache of results of filesystem information, to prevent
|
||||||
|
unnecessary stat calls. While it should not normally be necessary
|
||||||
|
to set this, you may pass the statCache from one glob() call to the
|
||||||
|
options object of another, if you know that the filesystem will not
|
||||||
|
change between calls. (See "Race Conditions" below.)
|
||||||
|
* `symlinks` A cache of known symbolic links. You may pass in a
|
||||||
|
previously generated `symlinks` object to save `lstat` calls when
|
||||||
|
resolving `**` matches.
|
||||||
|
* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead.
|
||||||
|
* `nounique` In some cases, brace-expanded patterns can result in the
|
||||||
|
same file showing up multiple times in the result set. By default,
|
||||||
|
this implementation prevents duplicates in the result set. Set this
|
||||||
|
flag to disable that behavior.
|
||||||
|
* `nonull` Set to never return an empty set, instead returning a set
|
||||||
|
containing the pattern itself. This is the default in glob(3).
|
||||||
|
* `debug` Set to enable debug logging in minimatch and glob.
|
||||||
|
* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets.
|
||||||
|
* `noglobstar` Do not match `**` against multiple filenames. (Ie,
|
||||||
|
treat it as a normal `*` instead.)
|
||||||
|
* `noext` Do not match `+(a|b)` "extglob" patterns.
|
||||||
|
* `nocase` Perform a case-insensitive match. Note: on
|
||||||
|
case-insensitive filesystems, non-magic patterns will match by
|
||||||
|
default, since `stat` and `readdir` will not raise errors.
|
||||||
|
* `matchBase` Perform a basename-only match if the pattern does not
|
||||||
|
contain any slash characters. That is, `*.js` would be treated as
|
||||||
|
equivalent to `**/*.js`, matching all js files in all directories.
|
||||||
|
* `nodir` Do not match directories, only files. (Note: to match
|
||||||
|
*only* directories, simply put a `/` at the end of the pattern.)
|
||||||
|
* `ignore` Add a pattern or an array of glob patterns to exclude matches.
|
||||||
|
Note: `ignore` patterns are *always* in `dot:true` mode, regardless
|
||||||
|
of any other settings.
|
||||||
|
* `follow` Follow symlinked directories when expanding `**` patterns.
|
||||||
|
Note that this can result in a lot of duplicate references in the
|
||||||
|
presence of cyclic links.
|
||||||
|
* `realpath` Set to true to call `fs.realpath` on all of the results.
|
||||||
|
In the case of a symlink that cannot be resolved, the full absolute
|
||||||
|
path to the matched entry is returned (though it will usually be a
|
||||||
|
broken symlink)
|
||||||
|
* `absolute` Set to true to always receive absolute paths for matched
|
||||||
|
files. Unlike `realpath`, this also affects the values returned in
|
||||||
|
the `match` event.
|
||||||
|
* `fs` File-system object with Node's `fs` API. By default, the built-in
|
||||||
|
`fs` module will be used. Set to a volume provided by a library like
|
||||||
|
`memfs` to avoid using the "real" file-system.
|
||||||
|
|
||||||
|
## Comparisons to other fnmatch/glob implementations
|
||||||
|
|
||||||
|
While strict compliance with the existing standards is a worthwhile
|
||||||
|
goal, some discrepancies exist between node-glob and other
|
||||||
|
implementations, and are intentional.
|
||||||
|
|
||||||
|
The double-star character `**` is supported by default, unless the
|
||||||
|
`noglobstar` flag is set. This is supported in the manner of bsdglob
|
||||||
|
and bash 4.3, where `**` only has special significance if it is the only
|
||||||
|
thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
|
||||||
|
`a/**b` will not.
|
||||||
|
|
||||||
|
Note that symlinked directories are not crawled as part of a `**`,
|
||||||
|
though their contents may match against subsequent portions of the
|
||||||
|
pattern. This prevents infinite loops and duplicates and the like.
|
||||||
|
|
||||||
|
If an escaped pattern has no matches, and the `nonull` flag is set,
|
||||||
|
then glob returns the pattern as-provided, rather than
|
||||||
|
interpreting the character escapes. For example,
|
||||||
|
`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
|
||||||
|
`"*a?"`. This is akin to setting the `nullglob` option in bash, except
|
||||||
|
that it does not resolve escaped pattern characters.
|
||||||
|
|
||||||
|
If brace expansion is not disabled, then it is performed before any
|
||||||
|
other interpretation of the glob pattern. Thus, a pattern like
|
||||||
|
`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
|
||||||
|
**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
|
||||||
|
checked for validity. Since those two are valid, matching proceeds.
|
||||||
|
|
||||||
|
### Comments and Negation
|
||||||
|
|
||||||
|
Previously, this module let you mark a pattern as a "comment" if it
|
||||||
|
started with a `#` character, or a "negated" pattern if it started
|
||||||
|
with a `!` character.
|
||||||
|
|
||||||
|
These options were deprecated in version 5, and removed in version 6.
|
||||||
|
|
||||||
|
To specify things that should not match, use the `ignore` option.
|
||||||
|
|
||||||
|
## Windows
|
||||||
|
|
||||||
|
**Please only use forward-slashes in glob expressions.**
|
||||||
|
|
||||||
|
Though windows uses either `/` or `\` as its path separator, only `/`
|
||||||
|
characters are used by this glob implementation. You must use
|
||||||
|
forward-slashes **only** in glob expressions. Back-slashes will always
|
||||||
|
be interpreted as escape characters, not path separators.
|
||||||
|
|
||||||
|
Results from absolute patterns such as `/foo/*` are mounted onto the
|
||||||
|
root setting using `path.join`. On windows, this will by default result
|
||||||
|
in `/foo/*` matching `C:\foo\bar.txt`.
|
||||||
|
|
||||||
|
## Race Conditions
|
||||||
|
|
||||||
|
Glob searching, by its very nature, is susceptible to race conditions,
|
||||||
|
since it relies on directory walking and such.
|
||||||
|
|
||||||
|
As a result, it is possible that a file that exists when glob looks for
|
||||||
|
it may have been deleted or modified by the time it returns the result.
|
||||||
|
|
||||||
|
As part of its internal implementation, this program caches all stat
|
||||||
|
and readdir calls that it makes, in order to cut down on system
|
||||||
|
overhead. However, this also makes it even more susceptible to races,
|
||||||
|
especially if the cache or statCache objects are reused between glob
|
||||||
|
calls.
|
||||||
|
|
||||||
|
Users are thus advised not to use a glob result as a guarantee of
|
||||||
|
filesystem state in the face of rapid changes. For the vast majority
|
||||||
|
of operations, this is never a problem.
|
||||||
|
|
||||||
|
## Glob Logo
|
||||||
|
Glob's logo was created by [Tanya Brassie](http://tanyabrassie.com/). Logo files can be found [here](https://github.com/isaacs/node-glob/tree/master/logo).
|
||||||
|
|
||||||
|
The logo is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/).
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
Any change to behavior (including bugfixes) must come with a test.
|
||||||
|
|
||||||
|
Patches that fail tests or reduce performance will be rejected.
|
||||||
|
|
||||||
|
```
|
||||||
|
# to run tests
|
||||||
|
npm test
|
||||||
|
|
||||||
|
# to re-generate test fixtures
|
||||||
|
npm run test-regen
|
||||||
|
|
||||||
|
# to benchmark against bash/zsh
|
||||||
|
npm run bench
|
||||||
|
|
||||||
|
# to profile javascript
|
||||||
|
npm run prof
|
||||||
|
```
|
||||||
|
|
||||||
|

|
||||||
236
node_modules/github-linguist/node_modules/glob/common.js
generated
vendored
Normal file
236
node_modules/github-linguist/node_modules/glob/common.js
generated
vendored
Normal file
@@ -0,0 +1,236 @@
|
|||||||
|
// Public surface of this helper module: option parsing, result
// finalization, path normalization, and ignore-pattern handling shared
// by the async Glob and the synchronous GlobSync implementations.
exports.setopts = setopts
exports.ownProp = ownProp
exports.makeAbs = makeAbs
exports.finish = finish
exports.mark = mark
exports.isIgnored = isIgnored
exports.childrenIgnored = childrenIgnored
|
||||||
|
|
||||||
|
// Safe own-property check that also works for objects created with
// Object.create(null), which have no hasOwnProperty method of their own.
function ownProp (obj, field) {
  var has = Object.prototype.hasOwnProperty
  return has.call(obj, field)
}
|
||||||
|
|
||||||
|
var fs = require("fs")
|
||||||
|
var path = require("path")
|
||||||
|
var minimatch = require("minimatch")
|
||||||
|
var isAbsolute = require("path-is-absolute")
|
||||||
|
var Minimatch = minimatch.Minimatch
|
||||||
|
|
||||||
|
// Comparator used to sort results. A fixed 'en' locale keeps the output
// order stable regardless of the host machine's locale settings.
function alphasort (a, b) {
  var order = a.localeCompare(b, 'en')
  return order
}
|
||||||
|
|
||||||
|
// Normalize options.ignore onto self.ignore as an array of
// {matcher, gmatcher} pairs (see ignoreMap). A missing option becomes an
// empty list; a single pattern is wrapped in an array.
function setupIgnores (self, options) {
  var raw = options.ignore || []

  if (!Array.isArray(raw))
    raw = [raw]

  self.ignore = raw.length ? raw.map(ignoreMap) : raw
}
|
||||||
|
|
||||||
|
// Compile one ignore pattern. Ignore patterns always run in dot:true mode
// so dotfiles are matched too. A pattern ending in '/**' additionally gets
// a "gmatcher" for the parent directory itself, so e.g. 'node_modules/**'
// also ignores 'node_modules' and allows pruning the whole subtree.
function ignoreMap (pattern) {
  var gmatcher = null
  var endsInGlobStar = pattern.slice(-3) === '/**'

  if (endsInGlobStar) {
    var parent = pattern.replace(/(\/\*\*)+$/, '')
    gmatcher = new Minimatch(parent, { dot: true })
  }

  return {
    matcher: new Minimatch(pattern, { dot: true }),
    gmatcher: gmatcher
  }
}
|
||||||
|
|
||||||
|
// Populate `self` (a Glob or GlobSync instance) from the user-supplied
// pattern and options. Mutates `options` (forces nonegate/nocomment) and
// stores the compiled Minimatch on self.minimatch.
function setopts (self, pattern, options) {
  if (!options)
    options = {}

  // base-matching: just use globstar for that.
  if (options.matchBase && -1 === pattern.indexOf("/")) {
    if (options.noglobstar) {
      throw new Error("base matching requires globstar")
    }
    pattern = "**/" + pattern
  }

  // Coerce the boolean-ish options explicitly.
  self.silent = !!options.silent
  self.pattern = pattern
  self.strict = options.strict !== false
  self.realpath = !!options.realpath
  self.realpathCache = options.realpathCache || Object.create(null)
  self.follow = !!options.follow
  self.dot = !!options.dot
  self.mark = !!options.mark
  self.nodir = !!options.nodir
  if (self.nodir)
    self.mark = true  // nodir filtering relies on the trailing-slash mark
  self.sync = !!options.sync
  self.nounique = !!options.nounique
  self.nonull = !!options.nonull
  self.nosort = !!options.nosort
  self.nocase = !!options.nocase
  self.stat = !!options.stat
  self.noprocess = !!options.noprocess
  self.absolute = !!options.absolute
  self.fs = options.fs || fs  // injectable fs implementation

  self.maxLength = options.maxLength || Infinity
  // Caches may be shared between glob calls via options; per the README
  // this makes results more susceptible to filesystem races.
  self.cache = options.cache || Object.create(null)
  self.statCache = options.statCache || Object.create(null)
  self.symlinks = options.symlinks || Object.create(null)

  setupIgnores(self, options)

  self.changedCwd = false
  var cwd = process.cwd()
  if (!ownProp(options, "cwd"))
    self.cwd = cwd
  else {
    self.cwd = path.resolve(options.cwd)
    self.changedCwd = self.cwd !== cwd
  }

  self.root = options.root || path.resolve(self.cwd, "/")
  self.root = path.resolve(self.root)
  if (process.platform === "win32")
    self.root = self.root.replace(/\\/g, "/")

  // TODO: is an absolute `cwd` supposed to be resolved against `root`?
  // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
  self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd)
  if (process.platform === "win32")
    self.cwdAbs = self.cwdAbs.replace(/\\/g, "/")
  self.nomount = !!options.nomount

  // disable comments and negation in Minimatch.
  // Note that they are not supported in Glob itself anyway.
  options.nonegate = true
  options.nocomment = true

  self.minimatch = new Minimatch(pattern, options)
  self.options = self.minimatch.options
}
|
||||||
|
|
||||||
|
// Flatten self.matches (one {path: true} set per pattern in the minimatch
// set) into the final self.found array: dedupe (unless nounique), fill in
// the literal pattern for empty sets (nonull), sort (unless nosort),
// apply mark/nodir post-processing, and drop ignored entries.
function finish (self) {
  var nou = self.nounique
  // With nounique we keep a plain list (duplicates allowed); otherwise a
  // null-prototype object is used as a set to dedupe.
  var all = nou ? [] : Object.create(null)

  for (var i = 0, l = self.matches.length; i < l; i ++) {
    var matches = self.matches[i]
    if (!matches || Object.keys(matches).length === 0) {
      if (self.nonull) {
        // do like the shell, and spit out the literal glob
        var literal = self.minimatch.globSet[i]
        if (nou)
          all.push(literal)
        else
          all[literal] = true
      }
    } else {
      // had matches
      var m = Object.keys(matches)
      if (nou)
        all.push.apply(all, m)
      else
        m.forEach(function (m) {
          all[m] = true
        })
    }
  }

  if (!nou)
    all = Object.keys(all)

  if (!self.nosort)
    all = all.sort(alphasort)

  // at *some* point we statted all of these
  if (self.mark) {
    for (var i = 0; i < all.length; i++) {
      all[i] = self._mark(all[i])
    }
    if (self.nodir) {
      // Drop anything known to be a directory, either by the trailing
      // slash added by _mark or by the readdir/stat cache.
      all = all.filter(function (e) {
        var notDir = !(/\/$/.test(e))
        var c = self.cache[e] || self.cache[makeAbs(self, e)]
        if (notDir && c)
          notDir = c !== 'DIR' && !Array.isArray(c)
        return notDir
      })
    }
  }

  if (self.ignore.length)
    all = all.filter(function(m) {
      return !isIgnored(self, m)
    })

  self.found = all
}
|
||||||
|
|
||||||
|
// Fix the trailing slash on `p` based on what the cache says the path is:
// directories gain a '/', non-directories lose theirs. When the string
// changes, the stat/readdir cache entries are mirrored under the new key
// so both spellings stay consistent.
function mark (self, p) {
  var abs = makeAbs(self, p)
  var cached = self.cache[abs]
  var marked = p

  if (cached) {
    var isDir = cached === 'DIR' || Array.isArray(cached)
    var hasSlash = p.slice(-1) === '/'

    if (isDir && !hasSlash)
      marked += '/'
    else if (!isDir && hasSlash)
      marked = marked.slice(0, -1)

    if (marked !== p) {
      var markedAbs = makeAbs(self, marked)
      self.statCache[markedAbs] = self.statCache[abs]
      self.cache[markedAbs] = self.cache[abs]
    }
  }

  return marked
}
|
||||||
|
|
||||||
|
// lotta situps...
// Resolve `f` to an absolute path: leading-slash paths are mounted on the
// configured root, already-absolute (and empty) strings pass through, and
// relative paths resolve against the (possibly changed) cwd. Backslashes
// are normalized to '/' on Windows.
function makeAbs (self, f) {
  var abs
  if (f.charAt(0) === '/')
    abs = path.join(self.root, f)
  else if (isAbsolute(f) || f === '')
    abs = f
  else if (self.changedCwd)
    abs = path.resolve(self.cwd, f)
  else
    abs = path.resolve(f)

  if (process.platform === 'win32')
    abs = abs.replace(/\\/g, '/')

  return abs
}
|
||||||
|
|
||||||
|
|
||||||
|
// True if `path` matches any ignore pattern, either via the pattern's own
// matcher or via the parent-directory "gmatcher" created for patterns
// ending in '/**' (e.g. 'node_modules/**' also ignores 'node_modules'
// itself along with its contents).
function isIgnored (self, path) {
  if (!self.ignore.length)
    return false

  return self.ignore.some(function (item) {
    if (item.matcher.match(path))
      return true
    return !!(item.gmatcher && item.gmatcher.match(path))
  })
}
|
||||||
|
|
||||||
|
// True if everything under the directory `path` is ignored, i.e. some
// ignore pattern ended in '/**' and its parent-directory gmatcher matches
// this path. Lets the walker prune whole subtrees before reading them.
function childrenIgnored (self, path) {
  if (!self.ignore.length)
    return false

  return self.ignore.some(function (item) {
    return Boolean(item.gmatcher && item.gmatcher.match(path))
  })
}
|
||||||
787
node_modules/github-linguist/node_modules/glob/glob.js
generated
vendored
Normal file
787
node_modules/github-linguist/node_modules/glob/glob.js
generated
vendored
Normal file
@@ -0,0 +1,787 @@
|
|||||||
|
// Approach:
|
||||||
|
//
|
||||||
|
// 1. Get the minimatch set
|
||||||
|
// 2. For each pattern in the set, PROCESS(pattern, false)
|
||||||
|
// 3. Store matches per-set, then uniq them
|
||||||
|
//
|
||||||
|
// PROCESS(pattern, inGlobStar)
|
||||||
|
// Get the first [n] items from pattern that are all strings
|
||||||
|
// Join these together. This is PREFIX.
|
||||||
|
// If there is no more remaining, then stat(PREFIX) and
|
||||||
|
// add to matches if it succeeds. END.
|
||||||
|
//
|
||||||
|
// If inGlobStar and PREFIX is symlink and points to dir
|
||||||
|
// set ENTRIES = []
|
||||||
|
// else readdir(PREFIX) as ENTRIES
|
||||||
|
// If fail, END
|
||||||
|
//
|
||||||
|
// with ENTRIES
|
||||||
|
// If pattern[n] is GLOBSTAR
|
||||||
|
// // handle the case where the globstar match is empty
|
||||||
|
// // by pruning it out, and testing the resulting pattern
|
||||||
|
// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
|
||||||
|
// // handle other cases.
|
||||||
|
// for ENTRY in ENTRIES (not dotfiles)
|
||||||
|
// // attach globstar + tail onto the entry
|
||||||
|
// // Mark that this entry is a globstar match
|
||||||
|
// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
|
||||||
|
//
|
||||||
|
// else // not globstar
|
||||||
|
// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
|
||||||
|
// Test ENTRY against pattern[n]
|
||||||
|
// If fails, continue
|
||||||
|
// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
|
||||||
|
//
|
||||||
|
// Caveat:
|
||||||
|
// Cache all stats and readdirs results to minimize syscall. Since all
|
||||||
|
// we ever care about is existence and directory-ness, we can just keep
|
||||||
|
// `true` for files, and [children,...] for directories, or `false` for
|
||||||
|
// things that don't exist.
|
||||||
|
|
||||||
|
module.exports = glob
|
||||||
|
|
||||||
|
var rp = require('fs.realpath')
|
||||||
|
var minimatch = require('minimatch')
|
||||||
|
var Minimatch = minimatch.Minimatch
|
||||||
|
var inherits = require('inherits')
|
||||||
|
var EE = require('events').EventEmitter
|
||||||
|
var path = require('path')
|
||||||
|
var assert = require('assert')
|
||||||
|
var isAbsolute = require('path-is-absolute')
|
||||||
|
var globSync = require('./sync.js')
|
||||||
|
var common = require('./common.js')
|
||||||
|
var setopts = common.setopts
|
||||||
|
var ownProp = common.ownProp
|
||||||
|
var inflight = require('inflight')
|
||||||
|
var util = require('util')
|
||||||
|
var childrenIgnored = common.childrenIgnored
|
||||||
|
var isIgnored = common.isIgnored
|
||||||
|
|
||||||
|
var once = require('once')
|
||||||
|
|
||||||
|
// Main entry point: glob(pattern, [options], [cb]).
// Returns a Glob instance for the async walk, or — when options.sync is
// set — the array of matches from globSync (in which case passing a
// callback is an error).
function glob (pattern, options, cb) {
  if (typeof options === 'function') {
    cb = options
    options = {}
  }
  if (!options)
    options = {}

  if (options.sync) {
    if (cb)
      throw new TypeError('callback provided to sync glob')
    return globSync(pattern, options)
  }

  return new Glob(pattern, options, cb)
}
|
||||||
|
|
||||||
|
// Expose the synchronous API and its class alongside the async entry point.
glob.sync = globSync
var GlobSync = glob.GlobSync = globSync.GlobSync

// old api surface
glob.glob = glob
|
||||||
|
|
||||||
|
// Shallow-copy the own enumerable keys of `add` onto `origin` and return
// `origin`. Null or non-object `add` values are ignored.
function extend (origin, add) {
  if (add === null || typeof add !== 'object')
    return origin

  for (var i = 0, keys = Object.keys(add); i < keys.length; i++) {
    origin[keys[i]] = add[keys[i]]
  }
  return origin
}
|
||||||
|
|
||||||
|
// True if the pattern contains glob magic: either brace expansion produced
// more than one pattern, or some segment compiled to a non-string matcher.
// A falsey pattern is never magic.
glob.hasMagic = function (pattern, options_) {
  var options = extend({}, options_)
  options.noprocess = true  // only need the compiled set, not a walk

  var g = new Glob(pattern, options)
  var set = g.minimatch.set

  if (!pattern)
    return false

  if (set.length > 1)
    return true

  return set[0].some(function (part) {
    return typeof part !== 'string'
  })
}
|
||||||
|
|
||||||
|
glob.Glob = Glob
inherits(Glob, EE)
// EventEmitter that performs an asynchronous glob walk.
// Emits 'match' per result, 'end' with all matches, plus 'error', 'abort',
// 'pause'/'resume', and 'stat' when stat info is cached for a match.
function Glob (pattern, options, cb) {
  if (typeof options === 'function') {
    cb = options
    options = null
  }

  // Delegate to the synchronous class when asked (callback not allowed).
  if (options && options.sync) {
    if (cb)
      throw new TypeError('callback provided to sync glob')
    return new GlobSync(pattern, options)
  }

  if (!(this instanceof Glob))
    return new Glob(pattern, options, cb)

  setopts(this, pattern, options)
  this._didRealPath = false

  // process each pattern in the minimatch set
  var n = this.minimatch.set.length

  // The matches are stored as {<filename>: true,...} so that
  // duplicates are automagically pruned.
  // Later, we do an Object.keys() on these.
  // Keep them as a list so we can fill in when nonull is set.
  this.matches = new Array(n)

  // Optional node-style callback: wired to the 'error'/'end' events.
  if (typeof cb === 'function') {
    cb = once(cb)
    this.on('error', cb)
    this.on('end', function (matches) {
      cb(null, matches)
    })
  }

  var self = this
  this._processing = 0

  this._emitQueue = []
  this._processQueue = []
  this.paused = false

  // noprocess: caller only wants the compiled pattern (see hasMagic).
  if (this.noprocess)
    return this

  if (n === 0)
    return done()

  // `sync` guards against 'end' firing before the constructor returns:
  // while true, completion is deferred to the next tick.
  var sync = true
  for (var i = 0; i < n; i ++) {
    this._process(this.minimatch.set[i], i, false, done)
  }
  sync = false

  function done () {
    --self._processing
    if (self._processing <= 0) {
      if (sync) {
        process.nextTick(function () {
          self._finish()
        })
      } else {
        self._finish()
      }
    }
  }
}
|
||||||
|
|
||||||
|
// Called once all processing has drained: optionally resolve realpaths
// first, then flatten matches via common.finish and emit 'end'.
Glob.prototype._finish = function () {
  assert(this instanceof Glob)
  if (this.aborted)
    return

  // NOTE(review): the constructor initializes `_didRealPath` (capital P)
  // while this checks `_didRealpath`; benign, since undefined is falsy
  // and _realpath() sets the lowercase spelling before re-entering here.
  if (this.realpath && !this._didRealpath)
    return this._realpath()

  common.finish(this)
  this.emit('end', this.found)
}
|
||||||
|
|
||||||
|
// Resolve every match set through fs.realpath exactly once, then re-enter
// _finish. `n` counts outstanding per-set resolutions.
Glob.prototype._realpath = function () {
  if (this._didRealpath)
    return

  this._didRealpath = true

  var n = this.matches.length
  if (n === 0)
    return this._finish()

  var self = this
  for (var i = 0; i < this.matches.length; i++)
    this._realpathSet(i, next)

  function next () {
    if (--n === 0)
      self._finish()
  }
}
|
||||||
|
|
||||||
|
// Realpath-resolve one match set (the matches for pattern `index`),
// rebuilding it keyed by the resolved paths. A stat failure inside the
// resolution falls back to the absolute (unresolved) path; any other
// error is emitted on the instance.
Glob.prototype._realpathSet = function (index, cb) {
  var matchset = this.matches[index]
  if (!matchset)
    return cb()

  var found = Object.keys(matchset)
  var self = this
  var n = found.length

  if (n === 0)
    return cb()

  var set = this.matches[index] = Object.create(null)
  found.forEach(function (p, i) {
    // If there's a problem with the stat, then it means that
    // one or more of the links in the realpath couldn't be
    // resolved.  just return the abs value in that case.
    p = self._makeAbs(p)
    rp.realpath(p, self.realpathCache, function (er, real) {
      if (!er)
        set[real] = true
      else if (er.syscall === 'stat')
        set[p] = true
      else
        self.emit('error', er) // srsly wtf right here

      if (--n === 0) {
        self.matches[index] = set
        cb()
      }
    })
  })
}
|
||||||
|
|
||||||
|
// Delegate trailing-slash marking to the shared helper in common.js.
Glob.prototype._mark = function (p) {
  return common.mark(this, p)
}

// Delegate absolute-path resolution to the shared helper in common.js.
Glob.prototype._makeAbs = function (f) {
  return common.makeAbs(this, f)
}

// Stop all further processing; in-flight callbacks check this.aborted
// and become no-ops.
Glob.prototype.abort = function () {
  this.aborted = true
  this.emit('abort')
}
|
||||||
|
|
||||||
|
// Suspend emission and processing; work arriving while paused is queued
// and replayed by resume().
Glob.prototype.pause = function () {
  if (!this.paused) {
    this.paused = true
    this.emit('pause')
  }
}

// Un-pause: first flush queued 'match' emissions, then replay queued
// _process calls (decrementing _processing for each, since _process will
// re-increment it on entry).
Glob.prototype.resume = function () {
  if (this.paused) {
    this.emit('resume')
    this.paused = false
    if (this._emitQueue.length) {
      // Copy-then-clear so re-entrant pauses queue into a fresh list.
      var eq = this._emitQueue.slice(0)
      this._emitQueue.length = 0
      for (var i = 0; i < eq.length; i ++) {
        var e = eq[i]
        this._emitMatch(e[0], e[1])
      }
    }
    if (this._processQueue.length) {
      var pq = this._processQueue.slice(0)
      this._processQueue.length = 0
      for (var i = 0; i < pq.length; i ++) {
        var p = pq[i]
        this._processing--
        this._process(p[0], p[1], p[2], p[3])
      }
    }
  }
}
|
||||||
|
|
||||||
|
// Core recursive step: take one compiled pattern (an array of string
// segments and matcher objects), split off the leading literal prefix,
// and dispatch to _processSimple (no magic left), _processGlobStar
// (next segment is '**'), or _processReaddir (next segment is a matcher).
Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
  assert(this instanceof Glob)
  assert(typeof cb === 'function')

  if (this.aborted)
    return

  this._processing++
  if (this.paused) {
    // Defer until resume() replays the queue.
    this._processQueue.push([pattern, index, inGlobStar, cb])
    return
  }

  //console.error('PROCESS %d', this._processing, pattern)

  // Get the first [n] parts of pattern that are all strings.
  var n = 0
  while (typeof pattern[n] === 'string') {
    n ++
  }
  // now n is the index of the first one that is *not* a string.

  // see if there's anything else
  var prefix
  switch (n) {
    // if not, then this is rather simple
    case pattern.length:
      this._processSimple(pattern.join('/'), index, cb)
      return

    case 0:
      // pattern *starts* with some non-trivial item.
      // going to readdir(cwd), but not include the prefix in matches.
      prefix = null
      break

    default:
      // pattern has some string bits in the front.
      // whatever it starts with, whether that's 'absolute' like /foo/bar,
      // or 'relative' like '../baz'
      prefix = pattern.slice(0, n).join('/')
      break
  }

  var remain = pattern.slice(n)

  // get the list of entries.
  var read
  if (prefix === null)
    read = '.'
  else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
    if (!prefix || !isAbsolute(prefix))
      prefix = '/' + prefix
    read = prefix
  } else
    read = prefix

  var abs = this._makeAbs(read)

  //if ignored, skip _processing
  if (childrenIgnored(this, read))
    return cb()

  var isGlobStar = remain[0] === minimatch.GLOBSTAR
  if (isGlobStar)
    this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
  else
    this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
}
|
||||||
|
|
||||||
|
// Read the directory's entries, then hand them to _processReaddir2 for
// matching against the next pattern segment.
Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
  var self = this
  var onEntries = function (er, entries) {
    return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
  }
  this._readdir(abs, inGlobStar, onEntries)
}
|
||||||
|
|
||||||
|
// Match the directory's entries against the next pattern segment. If that
// segment is the last one (and no mark/stat is needed) the survivors are
// emitted directly; otherwise each survivor is substituted into the
// pattern and recursed via _process.
Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {

  // if the abs isn't a dir, then nothing can match!
  if (!entries)
    return cb()

  // It will only match dot entries if it starts with a dot, or if
  // dot is set.  Stuff like @(.foo|.bar) isn't allowed.
  var pn = remain[0]
  var negate = !!this.minimatch.negate
  var rawGlob = pn._glob
  var dotOk = this.dot || rawGlob.charAt(0) === '.'

  var matchedEntries = []
  for (var i = 0; i < entries.length; i++) {
    var e = entries[i]
    if (e.charAt(0) !== '.' || dotOk) {
      var m
      if (negate && !prefix) {
        // Negated pattern with no prefix: keep entries that do NOT match.
        m = !e.match(pn)
      } else {
        m = e.match(pn)
      }
      if (m)
        matchedEntries.push(e)
    }
  }

  //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)

  var len = matchedEntries.length
  // If there are no matched entries, then nothing matches.
  if (len === 0)
    return cb()

  // if this is the last remaining pattern bit, then no need for
  // an additional stat *unless* the user has specified mark or
  // stat explicitly.  We know they exist, since readdir returned
  // them.

  if (remain.length === 1 && !this.mark && !this.stat) {
    if (!this.matches[index])
      this.matches[index] = Object.create(null)

    for (var i = 0; i < len; i ++) {
      var e = matchedEntries[i]
      if (prefix) {
        if (prefix !== '/')
          e = prefix + '/' + e
        else
          e = prefix + e
      }

      if (e.charAt(0) === '/' && !this.nomount) {
        e = path.join(this.root, e)
      }
      this._emitMatch(index, e)
    }
    // This was the last one, and no stats were needed
    return cb()
  }

  // now test all matched entries as stand-ins for that part
  // of the pattern.
  remain.shift()
  for (var i = 0; i < len; i ++) {
    var e = matchedEntries[i]
    var newPattern
    if (prefix) {
      if (prefix !== '/')
        e = prefix + '/' + e
      else
        e = prefix + e
    }
    this._process([e].concat(remain), index, inGlobStar, cb)
  }
  cb()
}
|
||||||
|
|
||||||
|
// Record one match for pattern `index` and emit the 'match' event, after
// applying ignore filtering, pause queueing, mark/absolute transforms,
// dedupe, and nodir filtering. Also emits 'stat' when stat info is cached.
Glob.prototype._emitMatch = function (index, e) {
  if (this.aborted)
    return

  if (isIgnored(this, e))
    return

  if (this.paused) {
    // Replayed by resume().
    this._emitQueue.push([index, e])
    return
  }

  var abs = isAbsolute(e) ? e : this._makeAbs(e)

  if (this.mark)
    e = this._mark(e)

  if (this.absolute)
    e = abs

  // Already recorded for this pattern: skip the duplicate.
  if (this.matches[index][e])
    return

  if (this.nodir) {
    var c = this.cache[abs]
    if (c === 'DIR' || Array.isArray(c))
      return
  }

  this.matches[index][e] = true

  var st = this.statCache[abs]
  if (st)
    this.emit('stat', e, st)

  this.emit('match', e)
}
|
||||||
|
|
||||||
|
// readdir variant used while inside a '**': lstat first so symlinks can
// be recorded (and not traversed infinitely), unless options.follow is
// set. Concurrent lstats of the same path are coalesced via inflight.
Glob.prototype._readdirInGlobStar = function (abs, cb) {
  if (this.aborted)
    return

  // follow all symlinked directories forever
  // just proceed as if this is a non-globstar situation
  if (this.follow)
    return this._readdir(abs, false, cb)

  var lstatkey = 'lstat\0' + abs
  var self = this
  var lstatcb = inflight(lstatkey, lstatcb_)

  // inflight returns null when another lstat for this key is in flight;
  // our callback will be invoked when that one completes.
  if (lstatcb)
    self.fs.lstat(abs, lstatcb)

  function lstatcb_ (er, lstat) {
    if (er && er.code === 'ENOENT')
      return cb()

    var isSym = lstat && lstat.isSymbolicLink()
    self.symlinks[abs] = isSym

    // If it's not a symlink or a dir, then it's definitely a regular file.
    // don't bother doing a readdir in that case.
    if (!isSym && lstat && !lstat.isDirectory()) {
      self.cache[abs] = 'FILE'
      cb()
    } else
      self._readdir(abs, false, cb)
  }
}
|
||||||
|
|
||||||
|
// Cached, coalesced readdir. Answers from this.cache when possible
// ('FILE'/false -> no entries; an array -> the entries), defers to
// _readdirInGlobStar when inside a '**' and the symlink status of `abs`
// is not yet known.
Glob.prototype._readdir = function (abs, inGlobStar, cb) {
  if (this.aborted)
    return

  // Coalesce concurrent readdirs of the same path+mode; a null return
  // means another call is already in flight and will invoke us.
  cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
  if (!cb)
    return

  //console.error('RD %j %j', +inGlobStar, abs)
  if (inGlobStar && !ownProp(this.symlinks, abs))
    return this._readdirInGlobStar(abs, cb)

  if (ownProp(this.cache, abs)) {
    var c = this.cache[abs]
    if (!c || c === 'FILE')
      return cb()

    if (Array.isArray(c))
      return cb(null, c)
  }

  var self = this
  self.fs.readdir(abs, readdirCb(this, abs, cb))
}
|
||||||
|
|
||||||
|
// Build the fs.readdir callback for `abs`: errors are routed to
// _readdirError and entry lists to _readdirEntries on the owning instance.
function readdirCb (self, abs, cb) {
  return function (er, entries) {
    if (er) {
      self._readdirError(abs, er, cb)
    } else {
      self._readdirEntries(abs, entries, cb)
    }
  }
}
|
||||||
|
|
||||||
|
// Successful readdir: record the entry list in the cache (and, unless
// mark/stat will force real stats later, pre-mark each child as existing
// so subsequent stats can be skipped), then pass the entries on.
Glob.prototype._readdirEntries = function (abs, entries, cb) {
  if (this.aborted)
    return

  // if we haven't asked to stat everything, then just
  // assume that everything in there exists, so we can avoid
  // having to stat it a second time.
  if (!this.mark && !this.stat) {
    for (var i = 0; i < entries.length; i ++) {
      var e = entries[i]
      if (abs === '/')
        e = abs + e
      else
        e = abs + '/' + e
      this.cache[e] = true
    }
  }

  this.cache[abs] = entries
  return cb(null, entries)
}
|
||||||
|
|
||||||
|
// Failed readdir: cache what the error implies about the path (ENOTDIR ->
// it exists as a file; ENOENT and friends -> it doesn't exist), emit/abort
// for a bad cwd or — in strict mode — for unexpected errors, then continue.
Glob.prototype._readdirError = function (f, er, cb) {
  if (this.aborted)
    return

  // handle errors, and cache the information
  switch (er.code) {
    case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
    case 'ENOTDIR': // totally normal. means it *does* exist.
      var abs = this._makeAbs(f)
      this.cache[abs] = 'FILE'
      if (abs === this.cwdAbs) {
        // The cwd itself isn't a readable directory: fatal.
        var error = new Error(er.code + ' invalid cwd ' + this.cwd)
        error.path = this.cwd
        error.code = er.code
        this.emit('error', error)
        this.abort()
      }
      break

    case 'ENOENT': // not terribly unusual
    case 'ELOOP':
    case 'ENAMETOOLONG':
    case 'UNKNOWN':
      this.cache[this._makeAbs(f)] = false
      break

    default: // some unusual error.  Treat as failure.
      this.cache[this._makeAbs(f)] = false
      if (this.strict) {
        this.emit('error', er)
        // If the error is handled, then we abort
        // if not, we threw out of here
        this.abort()
      }
      if (!this.silent)
        console.error('glob error', er)
      break
  }

  return cb()
}
|
||||||
|
|
||||||
|
// Read the directory's entries, then expand the '**' segment over them in
// _processGlobStar2.
Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
  var self = this
  var onEntries = function (er, entries) {
    self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
  }
  this._readdir(abs, inGlobStar, onEntries)
}
|
||||||
|
|
||||||
|
|
||||||
|
// Expand a '**' segment over a directory's entries: try the pattern with
// the globstar pruned out entirely, then for each non-dot child try both
// "child replaces **" and "child followed by **". Symlinked dirs are not
// re-entered while already inside a globstar, which bounds cycles.
Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
  //console.error('pgs2', prefix, remain[0], entries)

  // no entries means not a dir, so it can never have matches
  // foo.txt/** doesn't match foo.txt
  if (!entries)
    return cb()

  // test without the globstar, and with every child both below
  // and replacing the globstar.
  var remainWithoutGlobStar = remain.slice(1)
  var gspref = prefix ? [ prefix ] : []
  var noGlobStar = gspref.concat(remainWithoutGlobStar)

  // the noGlobStar pattern exits the inGlobStar state
  this._process(noGlobStar, index, false, cb)

  var isSym = this.symlinks[abs]
  var len = entries.length

  // If it's a symlink, and we're in a globstar, then stop
  if (isSym && inGlobStar)
    return cb()

  for (var i = 0; i < len; i++) {
    var e = entries[i]
    if (e.charAt(0) === '.' && !this.dot)
      continue

    // these two cases enter the inGlobStar state
    var instead = gspref.concat(entries[i], remainWithoutGlobStar)
    this._process(instead, index, true, cb)

    var below = gspref.concat(entries[i], remain)
    this._process(below, index, true, cb)
  }

  cb()
}
|
||||||
|
|
||||||
|
// Pattern with no magic left (all-string segments): stat the literal path
// and let _processSimple2 decide whether it counts as a match.
Glob.prototype._processSimple = function (prefix, index, cb) {
  // XXX review this.  Shouldn't it be doing the mounting etc
  // before doing stat?  kinda weird?
  var self = this
  var afterStat = function (er, exists) {
    self._processSimple2(prefix, index, er, exists, cb)
  }
  this._stat(prefix, afterStat)
}
|
||||||
|
// Second half of the no-magic case: if the literal path exists, mount it
// on the root when absolute (unless nomount), normalize Windows slashes,
// and emit it as a match; otherwise just record the empty result set.
Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {

  //console.error('ps2', prefix, exists)

  if (!this.matches[index])
    this.matches[index] = Object.create(null)

  // If it doesn't exist, then just mark the lack of results
  if (!exists)
    return cb()

  if (prefix && isAbsolute(prefix) && !this.nomount) {
    var trail = /[\/\\]$/.test(prefix)
    if (prefix.charAt(0) === '/') {
      prefix = path.join(this.root, prefix)
    } else {
      prefix = path.resolve(this.root, prefix)
      if (trail)
        prefix += '/'  // path.resolve strips the trailing slash; restore it
    }
  }

  if (process.platform === 'win32')
    prefix = prefix.replace(/\\/g, '/')

  // Mark this as a match
  this._emitMatch(index, prefix)
  cb()
}
|
||||||
|
|
||||||
|
// Returns either 'DIR', 'FILE', or false
// Cached, coalesced stat of `f` via cb(er, type[, stat]). A trailing slash
// on `f` means a directory is required, so a 'FILE' answer becomes a miss.
// Symlinks are lstat'd first, then stat'd to classify by their target
// (falling back to the link itself when the target is missing).
Glob.prototype._stat = function (f, cb) {
  var abs = this._makeAbs(f)
  var needDir = f.slice(-1) === '/'

  if (f.length > this.maxLength)
    return cb()

  if (!this.stat && ownProp(this.cache, abs)) {
    var c = this.cache[abs]

    // An array of entries in the cache means "directory".
    if (Array.isArray(c))
      c = 'DIR'

    // It exists, but maybe not how we need it
    if (!needDir || c === 'DIR')
      return cb(null, c)

    if (needDir && c === 'FILE')
      return cb()

    // otherwise we have to stat, because maybe c=true
    // if we know it exists, but not what it is.
  }

  var exists
  var stat = this.statCache[abs]
  if (stat !== undefined) {
    if (stat === false)
      return cb(null, stat)
    else {
      var type = stat.isDirectory() ? 'DIR' : 'FILE'
      if (needDir && type === 'FILE')
        return cb()
      else
        return cb(null, type, stat)
    }
  }

  var self = this
  // Coalesce concurrent stats of the same path; a null return means
  // another call is in flight and will invoke our callback.
  var statcb = inflight('stat\0' + abs, lstatcb_)
  if (statcb)
    self.fs.lstat(abs, statcb)

  function lstatcb_ (er, lstat) {
    if (lstat && lstat.isSymbolicLink()) {
      // If it's a symlink, then treat it as the target, unless
      // the target does not exist, then treat it as a file.
      return self.fs.stat(abs, function (er, stat) {
        if (er)
          self._stat2(f, abs, null, lstat, cb)
        else
          self._stat2(f, abs, er, stat, cb)
      })
    } else {
      self._stat2(f, abs, er, lstat, cb)
    }
  }
}
|
||||||
|
|
||||||
|
Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
|
||||||
|
if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
|
||||||
|
this.statCache[abs] = false
|
||||||
|
return cb()
|
||||||
|
}
|
||||||
|
|
||||||
|
var needDir = f.slice(-1) === '/'
|
||||||
|
this.statCache[abs] = stat
|
||||||
|
|
||||||
|
if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
|
||||||
|
return cb(null, false, stat)
|
||||||
|
|
||||||
|
var c = true
|
||||||
|
if (stat)
|
||||||
|
c = stat.isDirectory() ? 'DIR' : 'FILE'
|
||||||
|
this.cache[abs] = this.cache[abs] || c
|
||||||
|
|
||||||
|
if (needDir && c === 'FILE')
|
||||||
|
return cb()
|
||||||
|
|
||||||
|
return cb(null, c, stat)
|
||||||
|
}
|
||||||
52
node_modules/github-linguist/node_modules/glob/package.json
generated
vendored
Normal file
52
node_modules/github-linguist/node_modules/glob/package.json
generated
vendored
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
{
|
||||||
|
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
|
||||||
|
"name": "glob",
|
||||||
|
"description": "a little globber",
|
||||||
|
"version": "7.2.0",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git://github.com/isaacs/node-glob.git"
|
||||||
|
},
|
||||||
|
"main": "glob.js",
|
||||||
|
"files": [
|
||||||
|
"glob.js",
|
||||||
|
"sync.js",
|
||||||
|
"common.js"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": "*"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"fs.realpath": "^1.0.0",
|
||||||
|
"inflight": "^1.0.4",
|
||||||
|
"inherits": "2",
|
||||||
|
"minimatch": "^3.0.4",
|
||||||
|
"once": "^1.3.0",
|
||||||
|
"path-is-absolute": "^1.0.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"memfs": "^3.2.0",
|
||||||
|
"mkdirp": "0",
|
||||||
|
"rimraf": "^2.2.8",
|
||||||
|
"tap": "^15.0.6",
|
||||||
|
"tick": "0.0.6"
|
||||||
|
},
|
||||||
|
"tap": {
|
||||||
|
"before": "test/00-setup.js",
|
||||||
|
"after": "test/zz-cleanup.js",
|
||||||
|
"jobs": 1
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"prepublish": "npm run benchclean",
|
||||||
|
"profclean": "rm -f v8.log profile.txt",
|
||||||
|
"test": "tap",
|
||||||
|
"test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js",
|
||||||
|
"bench": "bash benchmark.sh",
|
||||||
|
"prof": "bash prof.sh && cat profile.txt",
|
||||||
|
"benchclean": "node benchclean.js"
|
||||||
|
},
|
||||||
|
"license": "ISC",
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/isaacs"
|
||||||
|
}
|
||||||
|
}
|
||||||
483
node_modules/github-linguist/node_modules/glob/sync.js
generated
vendored
Normal file
483
node_modules/github-linguist/node_modules/glob/sync.js
generated
vendored
Normal file
@@ -0,0 +1,483 @@
|
|||||||
|
module.exports = globSync
|
||||||
|
globSync.GlobSync = GlobSync
|
||||||
|
|
||||||
|
var rp = require('fs.realpath')
|
||||||
|
var minimatch = require('minimatch')
|
||||||
|
var Minimatch = minimatch.Minimatch
|
||||||
|
var Glob = require('./glob.js').Glob
|
||||||
|
var util = require('util')
|
||||||
|
var path = require('path')
|
||||||
|
var assert = require('assert')
|
||||||
|
var isAbsolute = require('path-is-absolute')
|
||||||
|
var common = require('./common.js')
|
||||||
|
var setopts = common.setopts
|
||||||
|
var ownProp = common.ownProp
|
||||||
|
var childrenIgnored = common.childrenIgnored
|
||||||
|
var isIgnored = common.isIgnored
|
||||||
|
|
||||||
|
function globSync (pattern, options) {
|
||||||
|
if (typeof options === 'function' || arguments.length === 3)
|
||||||
|
throw new TypeError('callback provided to sync glob\n'+
|
||||||
|
'See: https://github.com/isaacs/node-glob/issues/167')
|
||||||
|
|
||||||
|
return new GlobSync(pattern, options).found
|
||||||
|
}
|
||||||
|
|
||||||
|
function GlobSync (pattern, options) {
|
||||||
|
if (!pattern)
|
||||||
|
throw new Error('must provide pattern')
|
||||||
|
|
||||||
|
if (typeof options === 'function' || arguments.length === 3)
|
||||||
|
throw new TypeError('callback provided to sync glob\n'+
|
||||||
|
'See: https://github.com/isaacs/node-glob/issues/167')
|
||||||
|
|
||||||
|
if (!(this instanceof GlobSync))
|
||||||
|
return new GlobSync(pattern, options)
|
||||||
|
|
||||||
|
setopts(this, pattern, options)
|
||||||
|
|
||||||
|
if (this.noprocess)
|
||||||
|
return this
|
||||||
|
|
||||||
|
var n = this.minimatch.set.length
|
||||||
|
this.matches = new Array(n)
|
||||||
|
for (var i = 0; i < n; i ++) {
|
||||||
|
this._process(this.minimatch.set[i], i, false)
|
||||||
|
}
|
||||||
|
this._finish()
|
||||||
|
}
|
||||||
|
|
||||||
|
GlobSync.prototype._finish = function () {
|
||||||
|
assert(this instanceof GlobSync)
|
||||||
|
if (this.realpath) {
|
||||||
|
var self = this
|
||||||
|
this.matches.forEach(function (matchset, index) {
|
||||||
|
var set = self.matches[index] = Object.create(null)
|
||||||
|
for (var p in matchset) {
|
||||||
|
try {
|
||||||
|
p = self._makeAbs(p)
|
||||||
|
var real = rp.realpathSync(p, self.realpathCache)
|
||||||
|
set[real] = true
|
||||||
|
} catch (er) {
|
||||||
|
if (er.syscall === 'stat')
|
||||||
|
set[self._makeAbs(p)] = true
|
||||||
|
else
|
||||||
|
throw er
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
common.finish(this)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
GlobSync.prototype._process = function (pattern, index, inGlobStar) {
|
||||||
|
assert(this instanceof GlobSync)
|
||||||
|
|
||||||
|
// Get the first [n] parts of pattern that are all strings.
|
||||||
|
var n = 0
|
||||||
|
while (typeof pattern[n] === 'string') {
|
||||||
|
n ++
|
||||||
|
}
|
||||||
|
// now n is the index of the first one that is *not* a string.
|
||||||
|
|
||||||
|
// See if there's anything else
|
||||||
|
var prefix
|
||||||
|
switch (n) {
|
||||||
|
// if not, then this is rather simple
|
||||||
|
case pattern.length:
|
||||||
|
this._processSimple(pattern.join('/'), index)
|
||||||
|
return
|
||||||
|
|
||||||
|
case 0:
|
||||||
|
// pattern *starts* with some non-trivial item.
|
||||||
|
// going to readdir(cwd), but not include the prefix in matches.
|
||||||
|
prefix = null
|
||||||
|
break
|
||||||
|
|
||||||
|
default:
|
||||||
|
// pattern has some string bits in the front.
|
||||||
|
// whatever it starts with, whether that's 'absolute' like /foo/bar,
|
||||||
|
// or 'relative' like '../baz'
|
||||||
|
prefix = pattern.slice(0, n).join('/')
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
var remain = pattern.slice(n)
|
||||||
|
|
||||||
|
// get the list of entries.
|
||||||
|
var read
|
||||||
|
if (prefix === null)
|
||||||
|
read = '.'
|
||||||
|
else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
|
||||||
|
if (!prefix || !isAbsolute(prefix))
|
||||||
|
prefix = '/' + prefix
|
||||||
|
read = prefix
|
||||||
|
} else
|
||||||
|
read = prefix
|
||||||
|
|
||||||
|
var abs = this._makeAbs(read)
|
||||||
|
|
||||||
|
//if ignored, skip processing
|
||||||
|
if (childrenIgnored(this, read))
|
||||||
|
return
|
||||||
|
|
||||||
|
var isGlobStar = remain[0] === minimatch.GLOBSTAR
|
||||||
|
if (isGlobStar)
|
||||||
|
this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)
|
||||||
|
else
|
||||||
|
this._processReaddir(prefix, read, abs, remain, index, inGlobStar)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
|
||||||
|
var entries = this._readdir(abs, inGlobStar)
|
||||||
|
|
||||||
|
// if the abs isn't a dir, then nothing can match!
|
||||||
|
if (!entries)
|
||||||
|
return
|
||||||
|
|
||||||
|
// It will only match dot entries if it starts with a dot, or if
|
||||||
|
// dot is set. Stuff like @(.foo|.bar) isn't allowed.
|
||||||
|
var pn = remain[0]
|
||||||
|
var negate = !!this.minimatch.negate
|
||||||
|
var rawGlob = pn._glob
|
||||||
|
var dotOk = this.dot || rawGlob.charAt(0) === '.'
|
||||||
|
|
||||||
|
var matchedEntries = []
|
||||||
|
for (var i = 0; i < entries.length; i++) {
|
||||||
|
var e = entries[i]
|
||||||
|
if (e.charAt(0) !== '.' || dotOk) {
|
||||||
|
var m
|
||||||
|
if (negate && !prefix) {
|
||||||
|
m = !e.match(pn)
|
||||||
|
} else {
|
||||||
|
m = e.match(pn)
|
||||||
|
}
|
||||||
|
if (m)
|
||||||
|
matchedEntries.push(e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var len = matchedEntries.length
|
||||||
|
// If there are no matched entries, then nothing matches.
|
||||||
|
if (len === 0)
|
||||||
|
return
|
||||||
|
|
||||||
|
// if this is the last remaining pattern bit, then no need for
|
||||||
|
// an additional stat *unless* the user has specified mark or
|
||||||
|
// stat explicitly. We know they exist, since readdir returned
|
||||||
|
// them.
|
||||||
|
|
||||||
|
if (remain.length === 1 && !this.mark && !this.stat) {
|
||||||
|
if (!this.matches[index])
|
||||||
|
this.matches[index] = Object.create(null)
|
||||||
|
|
||||||
|
for (var i = 0; i < len; i ++) {
|
||||||
|
var e = matchedEntries[i]
|
||||||
|
if (prefix) {
|
||||||
|
if (prefix.slice(-1) !== '/')
|
||||||
|
e = prefix + '/' + e
|
||||||
|
else
|
||||||
|
e = prefix + e
|
||||||
|
}
|
||||||
|
|
||||||
|
if (e.charAt(0) === '/' && !this.nomount) {
|
||||||
|
e = path.join(this.root, e)
|
||||||
|
}
|
||||||
|
this._emitMatch(index, e)
|
||||||
|
}
|
||||||
|
// This was the last one, and no stats were needed
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// now test all matched entries as stand-ins for that part
|
||||||
|
// of the pattern.
|
||||||
|
remain.shift()
|
||||||
|
for (var i = 0; i < len; i ++) {
|
||||||
|
var e = matchedEntries[i]
|
||||||
|
var newPattern
|
||||||
|
if (prefix)
|
||||||
|
newPattern = [prefix, e]
|
||||||
|
else
|
||||||
|
newPattern = [e]
|
||||||
|
this._process(newPattern.concat(remain), index, inGlobStar)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
GlobSync.prototype._emitMatch = function (index, e) {
|
||||||
|
if (isIgnored(this, e))
|
||||||
|
return
|
||||||
|
|
||||||
|
var abs = this._makeAbs(e)
|
||||||
|
|
||||||
|
if (this.mark)
|
||||||
|
e = this._mark(e)
|
||||||
|
|
||||||
|
if (this.absolute) {
|
||||||
|
e = abs
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.matches[index][e])
|
||||||
|
return
|
||||||
|
|
||||||
|
if (this.nodir) {
|
||||||
|
var c = this.cache[abs]
|
||||||
|
if (c === 'DIR' || Array.isArray(c))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this.matches[index][e] = true
|
||||||
|
|
||||||
|
if (this.stat)
|
||||||
|
this._stat(e)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
GlobSync.prototype._readdirInGlobStar = function (abs) {
|
||||||
|
// follow all symlinked directories forever
|
||||||
|
// just proceed as if this is a non-globstar situation
|
||||||
|
if (this.follow)
|
||||||
|
return this._readdir(abs, false)
|
||||||
|
|
||||||
|
var entries
|
||||||
|
var lstat
|
||||||
|
var stat
|
||||||
|
try {
|
||||||
|
lstat = this.fs.lstatSync(abs)
|
||||||
|
} catch (er) {
|
||||||
|
if (er.code === 'ENOENT') {
|
||||||
|
// lstat failed, doesn't exist
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var isSym = lstat && lstat.isSymbolicLink()
|
||||||
|
this.symlinks[abs] = isSym
|
||||||
|
|
||||||
|
// If it's not a symlink or a dir, then it's definitely a regular file.
|
||||||
|
// don't bother doing a readdir in that case.
|
||||||
|
if (!isSym && lstat && !lstat.isDirectory())
|
||||||
|
this.cache[abs] = 'FILE'
|
||||||
|
else
|
||||||
|
entries = this._readdir(abs, false)
|
||||||
|
|
||||||
|
return entries
|
||||||
|
}
|
||||||
|
|
||||||
|
GlobSync.prototype._readdir = function (abs, inGlobStar) {
|
||||||
|
var entries
|
||||||
|
|
||||||
|
if (inGlobStar && !ownProp(this.symlinks, abs))
|
||||||
|
return this._readdirInGlobStar(abs)
|
||||||
|
|
||||||
|
if (ownProp(this.cache, abs)) {
|
||||||
|
var c = this.cache[abs]
|
||||||
|
if (!c || c === 'FILE')
|
||||||
|
return null
|
||||||
|
|
||||||
|
if (Array.isArray(c))
|
||||||
|
return c
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
return this._readdirEntries(abs, this.fs.readdirSync(abs))
|
||||||
|
} catch (er) {
|
||||||
|
this._readdirError(abs, er)
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
GlobSync.prototype._readdirEntries = function (abs, entries) {
|
||||||
|
// if we haven't asked to stat everything, then just
|
||||||
|
// assume that everything in there exists, so we can avoid
|
||||||
|
// having to stat it a second time.
|
||||||
|
if (!this.mark && !this.stat) {
|
||||||
|
for (var i = 0; i < entries.length; i ++) {
|
||||||
|
var e = entries[i]
|
||||||
|
if (abs === '/')
|
||||||
|
e = abs + e
|
||||||
|
else
|
||||||
|
e = abs + '/' + e
|
||||||
|
this.cache[e] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.cache[abs] = entries
|
||||||
|
|
||||||
|
// mark and cache dir-ness
|
||||||
|
return entries
|
||||||
|
}
|
||||||
|
|
||||||
|
GlobSync.prototype._readdirError = function (f, er) {
|
||||||
|
// handle errors, and cache the information
|
||||||
|
switch (er.code) {
|
||||||
|
case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
|
||||||
|
case 'ENOTDIR': // totally normal. means it *does* exist.
|
||||||
|
var abs = this._makeAbs(f)
|
||||||
|
this.cache[abs] = 'FILE'
|
||||||
|
if (abs === this.cwdAbs) {
|
||||||
|
var error = new Error(er.code + ' invalid cwd ' + this.cwd)
|
||||||
|
error.path = this.cwd
|
||||||
|
error.code = er.code
|
||||||
|
throw error
|
||||||
|
}
|
||||||
|
break
|
||||||
|
|
||||||
|
case 'ENOENT': // not terribly unusual
|
||||||
|
case 'ELOOP':
|
||||||
|
case 'ENAMETOOLONG':
|
||||||
|
case 'UNKNOWN':
|
||||||
|
this.cache[this._makeAbs(f)] = false
|
||||||
|
break
|
||||||
|
|
||||||
|
default: // some unusual error. Treat as failure.
|
||||||
|
this.cache[this._makeAbs(f)] = false
|
||||||
|
if (this.strict)
|
||||||
|
throw er
|
||||||
|
if (!this.silent)
|
||||||
|
console.error('glob error', er)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {
|
||||||
|
|
||||||
|
var entries = this._readdir(abs, inGlobStar)
|
||||||
|
|
||||||
|
// no entries means not a dir, so it can never have matches
|
||||||
|
// foo.txt/** doesn't match foo.txt
|
||||||
|
if (!entries)
|
||||||
|
return
|
||||||
|
|
||||||
|
// test without the globstar, and with every child both below
|
||||||
|
// and replacing the globstar.
|
||||||
|
var remainWithoutGlobStar = remain.slice(1)
|
||||||
|
var gspref = prefix ? [ prefix ] : []
|
||||||
|
var noGlobStar = gspref.concat(remainWithoutGlobStar)
|
||||||
|
|
||||||
|
// the noGlobStar pattern exits the inGlobStar state
|
||||||
|
this._process(noGlobStar, index, false)
|
||||||
|
|
||||||
|
var len = entries.length
|
||||||
|
var isSym = this.symlinks[abs]
|
||||||
|
|
||||||
|
// If it's a symlink, and we're in a globstar, then stop
|
||||||
|
if (isSym && inGlobStar)
|
||||||
|
return
|
||||||
|
|
||||||
|
for (var i = 0; i < len; i++) {
|
||||||
|
var e = entries[i]
|
||||||
|
if (e.charAt(0) === '.' && !this.dot)
|
||||||
|
continue
|
||||||
|
|
||||||
|
// these two cases enter the inGlobStar state
|
||||||
|
var instead = gspref.concat(entries[i], remainWithoutGlobStar)
|
||||||
|
this._process(instead, index, true)
|
||||||
|
|
||||||
|
var below = gspref.concat(entries[i], remain)
|
||||||
|
this._process(below, index, true)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
GlobSync.prototype._processSimple = function (prefix, index) {
|
||||||
|
// XXX review this. Shouldn't it be doing the mounting etc
|
||||||
|
// before doing stat? kinda weird?
|
||||||
|
var exists = this._stat(prefix)
|
||||||
|
|
||||||
|
if (!this.matches[index])
|
||||||
|
this.matches[index] = Object.create(null)
|
||||||
|
|
||||||
|
// If it doesn't exist, then just mark the lack of results
|
||||||
|
if (!exists)
|
||||||
|
return
|
||||||
|
|
||||||
|
if (prefix && isAbsolute(prefix) && !this.nomount) {
|
||||||
|
var trail = /[\/\\]$/.test(prefix)
|
||||||
|
if (prefix.charAt(0) === '/') {
|
||||||
|
prefix = path.join(this.root, prefix)
|
||||||
|
} else {
|
||||||
|
prefix = path.resolve(this.root, prefix)
|
||||||
|
if (trail)
|
||||||
|
prefix += '/'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.platform === 'win32')
|
||||||
|
prefix = prefix.replace(/\\/g, '/')
|
||||||
|
|
||||||
|
// Mark this as a match
|
||||||
|
this._emitMatch(index, prefix)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Returns either 'DIR', 'FILE', or false
|
||||||
|
GlobSync.prototype._stat = function (f) {
|
||||||
|
var abs = this._makeAbs(f)
|
||||||
|
var needDir = f.slice(-1) === '/'
|
||||||
|
|
||||||
|
if (f.length > this.maxLength)
|
||||||
|
return false
|
||||||
|
|
||||||
|
if (!this.stat && ownProp(this.cache, abs)) {
|
||||||
|
var c = this.cache[abs]
|
||||||
|
|
||||||
|
if (Array.isArray(c))
|
||||||
|
c = 'DIR'
|
||||||
|
|
||||||
|
// It exists, but maybe not how we need it
|
||||||
|
if (!needDir || c === 'DIR')
|
||||||
|
return c
|
||||||
|
|
||||||
|
if (needDir && c === 'FILE')
|
||||||
|
return false
|
||||||
|
|
||||||
|
// otherwise we have to stat, because maybe c=true
|
||||||
|
// if we know it exists, but not what it is.
|
||||||
|
}
|
||||||
|
|
||||||
|
var exists
|
||||||
|
var stat = this.statCache[abs]
|
||||||
|
if (!stat) {
|
||||||
|
var lstat
|
||||||
|
try {
|
||||||
|
lstat = this.fs.lstatSync(abs)
|
||||||
|
} catch (er) {
|
||||||
|
if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
|
||||||
|
this.statCache[abs] = false
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (lstat && lstat.isSymbolicLink()) {
|
||||||
|
try {
|
||||||
|
stat = this.fs.statSync(abs)
|
||||||
|
} catch (er) {
|
||||||
|
stat = lstat
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
stat = lstat
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.statCache[abs] = stat
|
||||||
|
|
||||||
|
var c = true
|
||||||
|
if (stat)
|
||||||
|
c = stat.isDirectory() ? 'DIR' : 'FILE'
|
||||||
|
|
||||||
|
this.cache[abs] = this.cache[abs] || c
|
||||||
|
|
||||||
|
if (needDir && c === 'FILE')
|
||||||
|
return false
|
||||||
|
|
||||||
|
return c
|
||||||
|
}
|
||||||
|
|
||||||
|
GlobSync.prototype._mark = function (p) {
|
||||||
|
return common.mark(this, p)
|
||||||
|
}
|
||||||
|
|
||||||
|
GlobSync.prototype._makeAbs = function (f) {
|
||||||
|
return common.makeAbs(this, f)
|
||||||
|
}
|
||||||
8
node_modules/glob/LICENSE
generated
vendored
8
node_modules/glob/LICENSE
generated
vendored
@@ -1,6 +1,6 @@
|
|||||||
The ISC License
|
The ISC License
|
||||||
|
|
||||||
Copyright (c) Isaac Z. Schlueter and Contributors
|
Copyright (c) 2009-2022 Isaac Z. Schlueter and Contributors
|
||||||
|
|
||||||
Permission to use, copy, modify, and/or distribute this software for any
|
Permission to use, copy, modify, and/or distribute this software for any
|
||||||
purpose with or without fee is hereby granted, provided that the above
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
@@ -13,9 +13,3 @@ ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
|||||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
|
||||||
## Glob Logo
|
|
||||||
|
|
||||||
Glob's logo created by Tanya Brassie <http://tanyabrassie.com/>, licensed
|
|
||||||
under a Creative Commons Attribution-ShareAlike 4.0 International License
|
|
||||||
https://creativecommons.org/licenses/by-sa/4.0/
|
|
||||||
|
|||||||
3
node_modules/glob/README.md
generated
vendored
3
node_modules/glob/README.md
generated
vendored
@@ -276,6 +276,9 @@ the filesystem.
|
|||||||
* `absolute` Set to true to always receive absolute paths for matched
|
* `absolute` Set to true to always receive absolute paths for matched
|
||||||
files. Unlike `realpath`, this also affects the values returned in
|
files. Unlike `realpath`, this also affects the values returned in
|
||||||
the `match` event.
|
the `match` event.
|
||||||
|
* `fs` File-system object with Node's `fs` API. By default, the built-in
|
||||||
|
`fs` module will be used. Set to a volume provided by a library like
|
||||||
|
`memfs` to avoid using the "real" file-system.
|
||||||
|
|
||||||
## Comparisons to other fnmatch/glob implementations
|
## Comparisons to other fnmatch/glob implementations
|
||||||
|
|
||||||
|
|||||||
67
node_modules/glob/changelog.md
generated
vendored
67
node_modules/glob/changelog.md
generated
vendored
@@ -1,67 +0,0 @@
|
|||||||
## 7.0
|
|
||||||
|
|
||||||
- Raise error if `options.cwd` is specified, and not a directory
|
|
||||||
|
|
||||||
## 6.0
|
|
||||||
|
|
||||||
- Remove comment and negation pattern support
|
|
||||||
- Ignore patterns are always in `dot:true` mode
|
|
||||||
|
|
||||||
## 5.0
|
|
||||||
|
|
||||||
- Deprecate comment and negation patterns
|
|
||||||
- Fix regression in `mark` and `nodir` options from making all cache
|
|
||||||
keys absolute path.
|
|
||||||
- Abort if `fs.readdir` returns an error that's unexpected
|
|
||||||
- Don't emit `match` events for ignored items
|
|
||||||
- Treat ENOTSUP like ENOTDIR in readdir
|
|
||||||
|
|
||||||
## 4.5
|
|
||||||
|
|
||||||
- Add `options.follow` to always follow directory symlinks in globstar
|
|
||||||
- Add `options.realpath` to call `fs.realpath` on all results
|
|
||||||
- Always cache based on absolute path
|
|
||||||
|
|
||||||
## 4.4
|
|
||||||
|
|
||||||
- Add `options.ignore`
|
|
||||||
- Fix handling of broken symlinks
|
|
||||||
|
|
||||||
## 4.3
|
|
||||||
|
|
||||||
- Bump minimatch to 2.x
|
|
||||||
- Pass all tests on Windows
|
|
||||||
|
|
||||||
## 4.2
|
|
||||||
|
|
||||||
- Add `glob.hasMagic` function
|
|
||||||
- Add `options.nodir` flag
|
|
||||||
|
|
||||||
## 4.1
|
|
||||||
|
|
||||||
- Refactor sync and async implementations for performance
|
|
||||||
- Throw if callback provided to sync glob function
|
|
||||||
- Treat symbolic links in globstar results the same as Bash 4.3
|
|
||||||
|
|
||||||
## 4.0
|
|
||||||
|
|
||||||
- Use `^` for dependency versions (bumped major because this breaks
|
|
||||||
older npm versions)
|
|
||||||
- Ensure callbacks are only ever called once
|
|
||||||
- switch to ISC license
|
|
||||||
|
|
||||||
## 3.x
|
|
||||||
|
|
||||||
- Rewrite in JavaScript
|
|
||||||
- Add support for setting root, cwd, and windows support
|
|
||||||
- Cache many fs calls
|
|
||||||
- Add globstar support
|
|
||||||
- emit match events
|
|
||||||
|
|
||||||
## 2.x
|
|
||||||
|
|
||||||
- Use `glob.h` and `fnmatch.h` from NetBSD
|
|
||||||
|
|
||||||
## 1.x
|
|
||||||
|
|
||||||
- `glob.h` static binding.
|
|
||||||
4
node_modules/glob/common.js
generated
vendored
4
node_modules/glob/common.js
generated
vendored
@@ -10,6 +10,7 @@ function ownProp (obj, field) {
|
|||||||
return Object.prototype.hasOwnProperty.call(obj, field)
|
return Object.prototype.hasOwnProperty.call(obj, field)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var fs = require("fs")
|
||||||
var path = require("path")
|
var path = require("path")
|
||||||
var minimatch = require("minimatch")
|
var minimatch = require("minimatch")
|
||||||
var isAbsolute = require("path-is-absolute")
|
var isAbsolute = require("path-is-absolute")
|
||||||
@@ -75,6 +76,7 @@ function setopts (self, pattern, options) {
|
|||||||
self.stat = !!options.stat
|
self.stat = !!options.stat
|
||||||
self.noprocess = !!options.noprocess
|
self.noprocess = !!options.noprocess
|
||||||
self.absolute = !!options.absolute
|
self.absolute = !!options.absolute
|
||||||
|
self.fs = options.fs || fs
|
||||||
|
|
||||||
self.maxLength = options.maxLength || Infinity
|
self.maxLength = options.maxLength || Infinity
|
||||||
self.cache = options.cache || Object.create(null)
|
self.cache = options.cache || Object.create(null)
|
||||||
@@ -108,6 +110,8 @@ function setopts (self, pattern, options) {
|
|||||||
// Note that they are not supported in Glob itself anyway.
|
// Note that they are not supported in Glob itself anyway.
|
||||||
options.nonegate = true
|
options.nonegate = true
|
||||||
options.nocomment = true
|
options.nocomment = true
|
||||||
|
// always treat \ in patterns as escapes, not path separators
|
||||||
|
options.allowWindowsEscape = true
|
||||||
|
|
||||||
self.minimatch = new Minimatch(pattern, options)
|
self.minimatch = new Minimatch(pattern, options)
|
||||||
self.options = self.minimatch.options
|
self.options = self.minimatch.options
|
||||||
|
|||||||
14
node_modules/glob/glob.js
generated
vendored
14
node_modules/glob/glob.js
generated
vendored
@@ -40,7 +40,6 @@
|
|||||||
|
|
||||||
module.exports = glob
|
module.exports = glob
|
||||||
|
|
||||||
var fs = require('fs')
|
|
||||||
var rp = require('fs.realpath')
|
var rp = require('fs.realpath')
|
||||||
var minimatch = require('minimatch')
|
var minimatch = require('minimatch')
|
||||||
var Minimatch = minimatch.Minimatch
|
var Minimatch = minimatch.Minimatch
|
||||||
@@ -343,7 +342,10 @@ Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
|
|||||||
var read
|
var read
|
||||||
if (prefix === null)
|
if (prefix === null)
|
||||||
read = '.'
|
read = '.'
|
||||||
else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
|
else if (isAbsolute(prefix) ||
|
||||||
|
isAbsolute(pattern.map(function (p) {
|
||||||
|
return typeof p === 'string' ? p : '[*]'
|
||||||
|
}).join('/'))) {
|
||||||
if (!prefix || !isAbsolute(prefix))
|
if (!prefix || !isAbsolute(prefix))
|
||||||
prefix = '/' + prefix
|
prefix = '/' + prefix
|
||||||
read = prefix
|
read = prefix
|
||||||
@@ -501,7 +503,7 @@ Glob.prototype._readdirInGlobStar = function (abs, cb) {
|
|||||||
var lstatcb = inflight(lstatkey, lstatcb_)
|
var lstatcb = inflight(lstatkey, lstatcb_)
|
||||||
|
|
||||||
if (lstatcb)
|
if (lstatcb)
|
||||||
fs.lstat(abs, lstatcb)
|
self.fs.lstat(abs, lstatcb)
|
||||||
|
|
||||||
function lstatcb_ (er, lstat) {
|
function lstatcb_ (er, lstat) {
|
||||||
if (er && er.code === 'ENOENT')
|
if (er && er.code === 'ENOENT')
|
||||||
@@ -542,7 +544,7 @@ Glob.prototype._readdir = function (abs, inGlobStar, cb) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
var self = this
|
var self = this
|
||||||
fs.readdir(abs, readdirCb(this, abs, cb))
|
self.fs.readdir(abs, readdirCb(this, abs, cb))
|
||||||
}
|
}
|
||||||
|
|
||||||
function readdirCb (self, abs, cb) {
|
function readdirCb (self, abs, cb) {
|
||||||
@@ -746,13 +748,13 @@ Glob.prototype._stat = function (f, cb) {
|
|||||||
var self = this
|
var self = this
|
||||||
var statcb = inflight('stat\0' + abs, lstatcb_)
|
var statcb = inflight('stat\0' + abs, lstatcb_)
|
||||||
if (statcb)
|
if (statcb)
|
||||||
fs.lstat(abs, statcb)
|
self.fs.lstat(abs, statcb)
|
||||||
|
|
||||||
function lstatcb_ (er, lstat) {
|
function lstatcb_ (er, lstat) {
|
||||||
if (lstat && lstat.isSymbolicLink()) {
|
if (lstat && lstat.isSymbolicLink()) {
|
||||||
// If it's a symlink, then treat it as the target, unless
|
// If it's a symlink, then treat it as the target, unless
|
||||||
// the target does not exist, then treat it as a file.
|
// the target does not exist, then treat it as a file.
|
||||||
return fs.stat(abs, function (er, stat) {
|
return self.fs.stat(abs, function (er, stat) {
|
||||||
if (er)
|
if (er)
|
||||||
self._stat2(f, abs, null, lstat, cb)
|
self._stat2(f, abs, null, lstat, cb)
|
||||||
else
|
else
|
||||||
|
|||||||
2
node_modules/glob/node_modules/brace-expansion/.github/FUNDING.yml
generated
vendored
Normal file
2
node_modules/glob/node_modules/brace-expansion/.github/FUNDING.yml
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
tidelift: "npm/brace-expansion"
|
||||||
|
patreon: juliangruber
|
||||||
21
node_modules/glob/node_modules/brace-expansion/LICENSE
generated
vendored
Normal file
21
node_modules/glob/node_modules/brace-expansion/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
135
node_modules/glob/node_modules/brace-expansion/README.md
generated
vendored
Normal file
135
node_modules/glob/node_modules/brace-expansion/README.md
generated
vendored
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
# brace-expansion
|
||||||
|
|
||||||
|
[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html),
|
||||||
|
as known from sh/bash, in JavaScript.
|
||||||
|
|
||||||
|
[](http://travis-ci.org/juliangruber/brace-expansion)
|
||||||
|
[](https://www.npmjs.org/package/brace-expansion)
|
||||||
|
[](https://greenkeeper.io/)
|
||||||
|
|
||||||
|
[](https://ci.testling.com/juliangruber/brace-expansion)
|
||||||
|
|
||||||
|
## Example
|
||||||
|
|
||||||
|
```js
|
||||||
|
var expand = require('brace-expansion');
|
||||||
|
|
||||||
|
expand('file-{a,b,c}.jpg')
|
||||||
|
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
|
||||||
|
|
||||||
|
expand('-v{,,}')
|
||||||
|
// => ['-v', '-v', '-v']
|
||||||
|
|
||||||
|
expand('file{0..2}.jpg')
|
||||||
|
// => ['file0.jpg', 'file1.jpg', 'file2.jpg']
|
||||||
|
|
||||||
|
expand('file-{a..c}.jpg')
|
||||||
|
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
|
||||||
|
|
||||||
|
expand('file{2..0}.jpg')
|
||||||
|
// => ['file2.jpg', 'file1.jpg', 'file0.jpg']
|
||||||
|
|
||||||
|
expand('file{0..4..2}.jpg')
|
||||||
|
// => ['file0.jpg', 'file2.jpg', 'file4.jpg']
|
||||||
|
|
||||||
|
expand('file-{a..e..2}.jpg')
|
||||||
|
// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg']
|
||||||
|
|
||||||
|
expand('file{00..10..5}.jpg')
|
||||||
|
// => ['file00.jpg', 'file05.jpg', 'file10.jpg']
|
||||||
|
|
||||||
|
expand('{{A..C},{a..c}}')
|
||||||
|
// => ['A', 'B', 'C', 'a', 'b', 'c']
|
||||||
|
|
||||||
|
expand('ppp{,config,oe{,conf}}')
|
||||||
|
// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf']
|
||||||
|
```
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
```js
|
||||||
|
var expand = require('brace-expansion');
|
||||||
|
```
|
||||||
|
|
||||||
|
### var expanded = expand(str)
|
||||||
|
|
||||||
|
Return an array of all possible and valid expansions of `str`. If none are
|
||||||
|
found, `[str]` is returned.
|
||||||
|
|
||||||
|
Valid expansions are:
|
||||||
|
|
||||||
|
```js
|
||||||
|
/^(.*,)+(.+)?$/
|
||||||
|
// {a,b,...}
|
||||||
|
```
|
||||||
|
|
||||||
|
A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`.
|
||||||
|
|
||||||
|
```js
|
||||||
|
/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
|
||||||
|
// {x..y[..incr]}
|
||||||
|
```
|
||||||
|
|
||||||
|
A numeric sequence from `x` to `y` inclusive, with optional increment.
|
||||||
|
If `x` or `y` start with a leading `0`, all the numbers will be padded
|
||||||
|
to have equal length. Negative numbers and backwards iteration work too.
|
||||||
|
|
||||||
|
```js
|
||||||
|
/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
|
||||||
|
// {x..y[..incr]}
|
||||||
|
```
|
||||||
|
|
||||||
|
An alphabetic sequence from `x` to `y` inclusive, with optional increment.
|
||||||
|
`x` and `y` must be exactly one character, and if given, `incr` must be a
|
||||||
|
number.
|
||||||
|
|
||||||
|
For compatibility reasons, the string `${` is not eligible for brace expansion.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
With [npm](https://npmjs.org) do:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install brace-expansion
|
||||||
|
```
|
||||||
|
|
||||||
|
## Contributors
|
||||||
|
|
||||||
|
- [Julian Gruber](https://github.com/juliangruber)
|
||||||
|
- [Isaac Z. Schlueter](https://github.com/isaacs)
|
||||||
|
|
||||||
|
## Sponsors
|
||||||
|
|
||||||
|
This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)!
|
||||||
|
|
||||||
|
Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)!
|
||||||
|
|
||||||
|
## Security contact information
|
||||||
|
|
||||||
|
To report a security vulnerability, please use the
|
||||||
|
[Tidelift security contact](https://tidelift.com/security).
|
||||||
|
Tidelift will coordinate the fix and disclosure.
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
(MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||||
|
this software and associated documentation files (the "Software"), to deal in
|
||||||
|
the Software without restriction, including without limitation the rights to
|
||||||
|
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||||
|
of the Software, and to permit persons to whom the Software is furnished to do
|
||||||
|
so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
203
node_modules/glob/node_modules/brace-expansion/index.js
generated
vendored
Normal file
203
node_modules/glob/node_modules/brace-expansion/index.js
generated
vendored
Normal file
@@ -0,0 +1,203 @@
|
|||||||
|
var balanced = require('balanced-match');
|
||||||
|
|
||||||
|
module.exports = expandTop;
|
||||||
|
|
||||||
|
var escSlash = '\0SLASH'+Math.random()+'\0';
|
||||||
|
var escOpen = '\0OPEN'+Math.random()+'\0';
|
||||||
|
var escClose = '\0CLOSE'+Math.random()+'\0';
|
||||||
|
var escComma = '\0COMMA'+Math.random()+'\0';
|
||||||
|
var escPeriod = '\0PERIOD'+Math.random()+'\0';
|
||||||
|
|
||||||
|
function numeric(str) {
|
||||||
|
return parseInt(str, 10) == str
|
||||||
|
? parseInt(str, 10)
|
||||||
|
: str.charCodeAt(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
function escapeBraces(str) {
|
||||||
|
return str.split('\\\\').join(escSlash)
|
||||||
|
.split('\\{').join(escOpen)
|
||||||
|
.split('\\}').join(escClose)
|
||||||
|
.split('\\,').join(escComma)
|
||||||
|
.split('\\.').join(escPeriod);
|
||||||
|
}
|
||||||
|
|
||||||
|
function unescapeBraces(str) {
|
||||||
|
return str.split(escSlash).join('\\')
|
||||||
|
.split(escOpen).join('{')
|
||||||
|
.split(escClose).join('}')
|
||||||
|
.split(escComma).join(',')
|
||||||
|
.split(escPeriod).join('.');
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// Basically just str.split(","), but handling cases
|
||||||
|
// where we have nested braced sections, which should be
|
||||||
|
// treated as individual members, like {a,{b,c},d}
|
||||||
|
function parseCommaParts(str) {
|
||||||
|
if (!str)
|
||||||
|
return [''];
|
||||||
|
|
||||||
|
var parts = [];
|
||||||
|
var m = balanced('{', '}', str);
|
||||||
|
|
||||||
|
if (!m)
|
||||||
|
return str.split(',');
|
||||||
|
|
||||||
|
var pre = m.pre;
|
||||||
|
var body = m.body;
|
||||||
|
var post = m.post;
|
||||||
|
var p = pre.split(',');
|
||||||
|
|
||||||
|
p[p.length-1] += '{' + body + '}';
|
||||||
|
var postParts = parseCommaParts(post);
|
||||||
|
if (post.length) {
|
||||||
|
p[p.length-1] += postParts.shift();
|
||||||
|
p.push.apply(p, postParts);
|
||||||
|
}
|
||||||
|
|
||||||
|
parts.push.apply(parts, p);
|
||||||
|
|
||||||
|
return parts;
|
||||||
|
}
|
||||||
|
|
||||||
|
function expandTop(str) {
|
||||||
|
if (!str)
|
||||||
|
return [];
|
||||||
|
|
||||||
|
// I don't know why Bash 4.3 does this, but it does.
|
||||||
|
// Anything starting with {} will have the first two bytes preserved
|
||||||
|
// but *only* at the top level, so {},a}b will not expand to anything,
|
||||||
|
// but a{},b}c will be expanded to [a}c,abc].
|
||||||
|
// One could argue that this is a bug in Bash, but since the goal of
|
||||||
|
// this module is to match Bash's rules, we escape a leading {}
|
||||||
|
if (str.substr(0, 2) === '{}') {
|
||||||
|
str = '\\{\\}' + str.substr(2);
|
||||||
|
}
|
||||||
|
|
||||||
|
return expand(escapeBraces(str), true).map(unescapeBraces);
|
||||||
|
}
|
||||||
|
|
||||||
|
function embrace(str) {
|
||||||
|
return '{' + str + '}';
|
||||||
|
}
|
||||||
|
function isPadded(el) {
|
||||||
|
return /^-?0\d/.test(el);
|
||||||
|
}
|
||||||
|
|
||||||
|
function lte(i, y) {
|
||||||
|
return i <= y;
|
||||||
|
}
|
||||||
|
function gte(i, y) {
|
||||||
|
return i >= y;
|
||||||
|
}
|
||||||
|
|
||||||
|
function expand(str, isTop) {
|
||||||
|
var expansions = [];
|
||||||
|
|
||||||
|
var m = balanced('{', '}', str);
|
||||||
|
if (!m) return [str];
|
||||||
|
|
||||||
|
// no need to expand pre, since it is guaranteed to be free of brace-sets
|
||||||
|
var pre = m.pre;
|
||||||
|
var post = m.post.length
|
||||||
|
? expand(m.post, false)
|
||||||
|
: [''];
|
||||||
|
|
||||||
|
if (/\$$/.test(m.pre)) {
|
||||||
|
for (var k = 0; k < post.length; k++) {
|
||||||
|
var expansion = pre+ '{' + m.body + '}' + post[k];
|
||||||
|
expansions.push(expansion);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
|
||||||
|
var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
|
||||||
|
var isSequence = isNumericSequence || isAlphaSequence;
|
||||||
|
var isOptions = m.body.indexOf(',') >= 0;
|
||||||
|
if (!isSequence && !isOptions) {
|
||||||
|
// {a},b}
|
||||||
|
if (m.post.match(/,.*\}/)) {
|
||||||
|
str = m.pre + '{' + m.body + escClose + m.post;
|
||||||
|
return expand(str);
|
||||||
|
}
|
||||||
|
return [str];
|
||||||
|
}
|
||||||
|
|
||||||
|
var n;
|
||||||
|
if (isSequence) {
|
||||||
|
n = m.body.split(/\.\./);
|
||||||
|
} else {
|
||||||
|
n = parseCommaParts(m.body);
|
||||||
|
if (n.length === 1) {
|
||||||
|
// x{{a,b}}y ==> x{a}y x{b}y
|
||||||
|
n = expand(n[0], false).map(embrace);
|
||||||
|
if (n.length === 1) {
|
||||||
|
return post.map(function(p) {
|
||||||
|
return m.pre + n[0] + p;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// at this point, n is the parts, and we know it's not a comma set
|
||||||
|
// with a single entry.
|
||||||
|
var N;
|
||||||
|
|
||||||
|
if (isSequence) {
|
||||||
|
var x = numeric(n[0]);
|
||||||
|
var y = numeric(n[1]);
|
||||||
|
var width = Math.max(n[0].length, n[1].length)
|
||||||
|
var incr = n.length == 3
|
||||||
|
? Math.abs(numeric(n[2]))
|
||||||
|
: 1;
|
||||||
|
var test = lte;
|
||||||
|
var reverse = y < x;
|
||||||
|
if (reverse) {
|
||||||
|
incr *= -1;
|
||||||
|
test = gte;
|
||||||
|
}
|
||||||
|
var pad = n.some(isPadded);
|
||||||
|
|
||||||
|
N = [];
|
||||||
|
|
||||||
|
for (var i = x; test(i, y); i += incr) {
|
||||||
|
var c;
|
||||||
|
if (isAlphaSequence) {
|
||||||
|
c = String.fromCharCode(i);
|
||||||
|
if (c === '\\')
|
||||||
|
c = '';
|
||||||
|
} else {
|
||||||
|
c = String(i);
|
||||||
|
if (pad) {
|
||||||
|
var need = width - c.length;
|
||||||
|
if (need > 0) {
|
||||||
|
var z = new Array(need + 1).join('0');
|
||||||
|
if (i < 0)
|
||||||
|
c = '-' + z + c.slice(1);
|
||||||
|
else
|
||||||
|
c = z + c;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
N.push(c);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
N = [];
|
||||||
|
|
||||||
|
for (var j = 0; j < n.length; j++) {
|
||||||
|
N.push.apply(N, expand(n[j], false));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (var j = 0; j < N.length; j++) {
|
||||||
|
for (var k = 0; k < post.length; k++) {
|
||||||
|
var expansion = pre + N[j] + post[k];
|
||||||
|
if (!isTop || isSequence || expansion)
|
||||||
|
expansions.push(expansion);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return expansions;
|
||||||
|
}
|
||||||
|
|
||||||
46
node_modules/glob/node_modules/brace-expansion/package.json
generated
vendored
Normal file
46
node_modules/glob/node_modules/brace-expansion/package.json
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
{
|
||||||
|
"name": "brace-expansion",
|
||||||
|
"description": "Brace expansion as known from sh/bash",
|
||||||
|
"version": "2.0.1",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git://github.com/juliangruber/brace-expansion.git"
|
||||||
|
},
|
||||||
|
"homepage": "https://github.com/juliangruber/brace-expansion",
|
||||||
|
"main": "index.js",
|
||||||
|
"scripts": {
|
||||||
|
"test": "tape test/*.js",
|
||||||
|
"gentest": "bash test/generate.sh",
|
||||||
|
"bench": "matcha test/perf/bench.js"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"balanced-match": "^1.0.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@c4312/matcha": "^1.3.1",
|
||||||
|
"tape": "^4.6.0"
|
||||||
|
},
|
||||||
|
"keywords": [],
|
||||||
|
"author": {
|
||||||
|
"name": "Julian Gruber",
|
||||||
|
"email": "mail@juliangruber.com",
|
||||||
|
"url": "http://juliangruber.com"
|
||||||
|
},
|
||||||
|
"license": "MIT",
|
||||||
|
"testling": {
|
||||||
|
"files": "test/*.js",
|
||||||
|
"browsers": [
|
||||||
|
"ie/8..latest",
|
||||||
|
"firefox/20..latest",
|
||||||
|
"firefox/nightly",
|
||||||
|
"chrome/25..latest",
|
||||||
|
"chrome/canary",
|
||||||
|
"opera/12..latest",
|
||||||
|
"opera/next",
|
||||||
|
"safari/5.1..latest",
|
||||||
|
"ipad/6.0..latest",
|
||||||
|
"iphone/6.0..latest",
|
||||||
|
"android-browser/4.2..latest"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
15
node_modules/glob/node_modules/minimatch/LICENSE
generated
vendored
Normal file
15
node_modules/glob/node_modules/minimatch/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
The ISC License
|
||||||
|
|
||||||
|
Copyright (c) 2011-2022 Isaac Z. Schlueter and Contributors
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and/or distribute this software for any
|
||||||
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
|
copyright notice and this permission notice appear in all copies.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||||
|
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
244
node_modules/glob/node_modules/minimatch/README.md
generated
vendored
Normal file
244
node_modules/glob/node_modules/minimatch/README.md
generated
vendored
Normal file
@@ -0,0 +1,244 @@
|
|||||||
|
# minimatch
|
||||||
|
|
||||||
|
A minimal matching utility.
|
||||||
|
|
||||||
|
[](http://travis-ci.org/isaacs/minimatch)
|
||||||
|
|
||||||
|
|
||||||
|
This is the matching library used internally by npm.
|
||||||
|
|
||||||
|
It works by converting glob expressions into JavaScript `RegExp`
|
||||||
|
objects.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var minimatch = require("minimatch")
|
||||||
|
|
||||||
|
minimatch("bar.foo", "*.foo") // true!
|
||||||
|
minimatch("bar.foo", "*.bar") // false!
|
||||||
|
minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy!
|
||||||
|
```
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
Supports these glob features:
|
||||||
|
|
||||||
|
* Brace Expansion
|
||||||
|
* Extended glob matching
|
||||||
|
* "Globstar" `**` matching
|
||||||
|
|
||||||
|
See:
|
||||||
|
|
||||||
|
* `man sh`
|
||||||
|
* `man bash`
|
||||||
|
* `man 3 fnmatch`
|
||||||
|
* `man 5 gitignore`
|
||||||
|
|
||||||
|
## Windows
|
||||||
|
|
||||||
|
**Please only use forward-slashes in glob expressions.**
|
||||||
|
|
||||||
|
Though windows uses either `/` or `\` as its path separator, only `/`
|
||||||
|
characters are used by this glob implementation. You must use
|
||||||
|
forward-slashes **only** in glob expressions. Back-slashes in patterns
|
||||||
|
will always be interpreted as escape characters, not path separators.
|
||||||
|
|
||||||
|
Note that `\` or `/` _will_ be interpreted as path separators in paths on
|
||||||
|
Windows, and will match against `/` in glob expressions.
|
||||||
|
|
||||||
|
So just always use `/` in patterns.
|
||||||
|
|
||||||
|
## Minimatch Class
|
||||||
|
|
||||||
|
Create a minimatch object by instantiating the `minimatch.Minimatch` class.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var Minimatch = require("minimatch").Minimatch
|
||||||
|
var mm = new Minimatch(pattern, options)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Properties
|
||||||
|
|
||||||
|
* `pattern` The original pattern the minimatch object represents.
|
||||||
|
* `options` The options supplied to the constructor.
|
||||||
|
* `set` A 2-dimensional array of regexp or string expressions.
|
||||||
|
Each row in the
|
||||||
|
array corresponds to a brace-expanded pattern. Each item in the row
|
||||||
|
corresponds to a single path-part. For example, the pattern
|
||||||
|
`{a,b/c}/d` would expand to a set of patterns like:
|
||||||
|
|
||||||
|
[ [ a, d ]
|
||||||
|
, [ b, c, d ] ]
|
||||||
|
|
||||||
|
If a portion of the pattern doesn't have any "magic" in it
|
||||||
|
(that is, it's something like `"foo"` rather than `fo*o?`), then it
|
||||||
|
will be left as a string rather than converted to a regular
|
||||||
|
expression.
|
||||||
|
|
||||||
|
* `regexp` Created by the `makeRe` method. A single regular expression
|
||||||
|
expressing the entire pattern. This is useful in cases where you wish
|
||||||
|
to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled.
|
||||||
|
* `negate` True if the pattern is negated.
|
||||||
|
* `comment` True if the pattern is a comment.
|
||||||
|
* `empty` True if the pattern is `""`.
|
||||||
|
|
||||||
|
### Methods
|
||||||
|
|
||||||
|
* `makeRe` Generate the `regexp` member if necessary, and return it.
|
||||||
|
Will return `false` if the pattern is invalid.
|
||||||
|
* `match(fname)` Return true if the filename matches the pattern, or
|
||||||
|
false otherwise.
|
||||||
|
* `matchOne(fileArray, patternArray, partial)` Take a `/`-split
|
||||||
|
filename, and match it against a single row in the `regExpSet`. This
|
||||||
|
method is mainly for internal use, but is exposed so that it can be
|
||||||
|
used by a glob-walker that needs to avoid excessive filesystem calls.
|
||||||
|
|
||||||
|
All other methods are internal, and will be called as necessary.
|
||||||
|
|
||||||
|
### minimatch(path, pattern, options)
|
||||||
|
|
||||||
|
Main export. Tests a path against the pattern using the options.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var isJS = minimatch(file, "*.js", { matchBase: true })
|
||||||
|
```
|
||||||
|
|
||||||
|
### minimatch.filter(pattern, options)
|
||||||
|
|
||||||
|
Returns a function that tests its
|
||||||
|
supplied argument, suitable for use with `Array.filter`. Example:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true}))
|
||||||
|
```
|
||||||
|
|
||||||
|
### minimatch.match(list, pattern, options)
|
||||||
|
|
||||||
|
Match against the list of
|
||||||
|
files, in the style of fnmatch or glob. If nothing is matched, and
|
||||||
|
options.nonull is set, then return a list containing the pattern itself.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var javascripts = minimatch.match(fileList, "*.js", {matchBase: true}))
|
||||||
|
```
|
||||||
|
|
||||||
|
### minimatch.makeRe(pattern, options)
|
||||||
|
|
||||||
|
Make a regular expression object from the pattern.
|
||||||
|
|
||||||
|
## Options
|
||||||
|
|
||||||
|
All options are `false` by default.
|
||||||
|
|
||||||
|
### debug
|
||||||
|
|
||||||
|
Dump a ton of stuff to stderr.
|
||||||
|
|
||||||
|
### nobrace
|
||||||
|
|
||||||
|
Do not expand `{a,b}` and `{1..3}` brace sets.
|
||||||
|
|
||||||
|
### noglobstar
|
||||||
|
|
||||||
|
Disable `**` matching against multiple folder names.
|
||||||
|
|
||||||
|
### dot
|
||||||
|
|
||||||
|
Allow patterns to match filenames starting with a period, even if
|
||||||
|
the pattern does not explicitly have a period in that spot.
|
||||||
|
|
||||||
|
Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot`
|
||||||
|
is set.
|
||||||
|
|
||||||
|
### noext
|
||||||
|
|
||||||
|
Disable "extglob" style patterns like `+(a|b)`.
|
||||||
|
|
||||||
|
### nocase
|
||||||
|
|
||||||
|
Perform a case-insensitive match.
|
||||||
|
|
||||||
|
### nonull
|
||||||
|
|
||||||
|
When a match is not found by `minimatch.match`, return a list containing
|
||||||
|
the pattern itself if this option is set. When not set, an empty list
|
||||||
|
is returned if there are no matches.
|
||||||
|
|
||||||
|
### matchBase
|
||||||
|
|
||||||
|
If set, then patterns without slashes will be matched
|
||||||
|
against the basename of the path if it contains slashes. For example,
|
||||||
|
`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.
|
||||||
|
|
||||||
|
### nocomment
|
||||||
|
|
||||||
|
Suppress the behavior of treating `#` at the start of a pattern as a
|
||||||
|
comment.
|
||||||
|
|
||||||
|
### nonegate
|
||||||
|
|
||||||
|
Suppress the behavior of treating a leading `!` character as negation.
|
||||||
|
|
||||||
|
### flipNegate
|
||||||
|
|
||||||
|
Returns from negate expressions the same as if they were not negated.
|
||||||
|
(Ie, true on a hit, false on a miss.)
|
||||||
|
|
||||||
|
### partial
|
||||||
|
|
||||||
|
Compare a partial path to a pattern. As long as the parts of the path that
|
||||||
|
are present are not contradicted by the pattern, it will be treated as a
|
||||||
|
match. This is useful in applications where you're walking through a
|
||||||
|
folder structure, and don't yet have the full path, but want to ensure that
|
||||||
|
you do not walk down paths that can never be a match.
|
||||||
|
|
||||||
|
For example,
|
||||||
|
|
||||||
|
```js
|
||||||
|
minimatch('/a/b', '/a/*/c/d', { partial: true }) // true, might be /a/b/c/d
|
||||||
|
minimatch('/a/b', '/**/d', { partial: true }) // true, might be /a/b/.../d
|
||||||
|
minimatch('/x/y/z', '/a/**/z', { partial: true }) // false, because x !== a
|
||||||
|
```
|
||||||
|
|
||||||
|
## Comparisons to other fnmatch/glob implementations
|
||||||
|
|
||||||
|
While strict compliance with the existing standards is a worthwhile
|
||||||
|
goal, some discrepancies exist between minimatch and other
|
||||||
|
implementations, and are intentional.
|
||||||
|
|
||||||
|
If the pattern starts with a `!` character, then it is negated. Set the
|
||||||
|
`nonegate` flag to suppress this behavior, and treat leading `!`
|
||||||
|
characters normally. This is perhaps relevant if you wish to start the
|
||||||
|
pattern with a negative extglob pattern like `!(a|B)`. Multiple `!`
|
||||||
|
characters at the start of a pattern will negate the pattern multiple
|
||||||
|
times.
|
||||||
|
|
||||||
|
If a pattern starts with `#`, then it is treated as a comment, and
|
||||||
|
will not match anything. Use `\#` to match a literal `#` at the
|
||||||
|
start of a line, or set the `nocomment` flag to suppress this behavior.
|
||||||
|
|
||||||
|
The double-star character `**` is supported by default, unless the
|
||||||
|
`noglobstar` flag is set. This is supported in the manner of bsdglob
|
||||||
|
and bash 4.1, where `**` only has special significance if it is the only
|
||||||
|
thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
|
||||||
|
`a/**b` will not.
|
||||||
|
|
||||||
|
If an escaped pattern has no matches, and the `nonull` flag is set,
|
||||||
|
then minimatch.match returns the pattern as-provided, rather than
|
||||||
|
interpreting the character escapes. For example,
|
||||||
|
`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
|
||||||
|
`"*a?"`. This is akin to setting the `nullglob` option in bash, except
|
||||||
|
that it does not resolve escaped pattern characters.
|
||||||
|
|
||||||
|
If brace expansion is not disabled, then it is performed before any
|
||||||
|
other interpretation of the glob pattern. Thus, a pattern like
|
||||||
|
`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
|
||||||
|
**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
|
||||||
|
checked for validity. Since those two are valid, matching proceeds.
|
||||||
|
|
||||||
|
Note that `fnmatch(3)` in libc is an extremely naive string comparison
|
||||||
|
matcher, which does not do anything special for slashes. This library is
|
||||||
|
designed to be used in glob searching and file walkers, and so it does do
|
||||||
|
special things with `/`. Thus, `foo*` will not match `foo/bar` in this
|
||||||
|
library, even though it would in `fnmatch(3)`.
|
||||||
4
node_modules/glob/node_modules/minimatch/lib/path.js
generated
vendored
Normal file
4
node_modules/glob/node_modules/minimatch/lib/path.js
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
const isWindows = typeof process === 'object' &&
|
||||||
|
process &&
|
||||||
|
process.platform === 'win32'
|
||||||
|
module.exports = isWindows ? { sep: '\\' } : { sep: '/' }
|
||||||
901
node_modules/glob/node_modules/minimatch/minimatch.js
generated
vendored
Normal file
901
node_modules/glob/node_modules/minimatch/minimatch.js
generated
vendored
Normal file
@@ -0,0 +1,901 @@
|
|||||||
|
const minimatch = module.exports = (p, pattern, options = {}) => {
|
||||||
|
assertValidPattern(pattern)
|
||||||
|
|
||||||
|
// shortcut: comments match nothing.
|
||||||
|
if (!options.nocomment && pattern.charAt(0) === '#') {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
return new Minimatch(pattern, options).match(p)
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = minimatch
|
||||||
|
|
||||||
|
const path = require('./lib/path.js')
|
||||||
|
minimatch.sep = path.sep
|
||||||
|
|
||||||
|
const GLOBSTAR = Symbol('globstar **')
|
||||||
|
minimatch.GLOBSTAR = GLOBSTAR
|
||||||
|
const expand = require('brace-expansion')
|
||||||
|
|
||||||
|
const plTypes = {
|
||||||
|
'!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
|
||||||
|
'?': { open: '(?:', close: ')?' },
|
||||||
|
'+': { open: '(?:', close: ')+' },
|
||||||
|
'*': { open: '(?:', close: ')*' },
|
||||||
|
'@': { open: '(?:', close: ')' }
|
||||||
|
}
|
||||||
|
|
||||||
|
// any single thing other than /
|
||||||
|
// don't need to escape / when using new RegExp()
|
||||||
|
const qmark = '[^/]'
|
||||||
|
|
||||||
|
// * => any number of characters
|
||||||
|
const star = qmark + '*?'
|
||||||
|
|
||||||
|
// ** when dots are allowed. Anything goes, except .. and .
|
||||||
|
// not (^ or / followed by one or two dots followed by $ or /),
|
||||||
|
// followed by anything, any number of times.
|
||||||
|
const twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
|
||||||
|
|
||||||
|
// not a ^ or / followed by a dot,
|
||||||
|
// followed by anything, any number of times.
|
||||||
|
const twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
|
||||||
|
|
||||||
|
// "abc" -> { a:true, b:true, c:true }
|
||||||
|
const charSet = s => s.split('').reduce((set, c) => {
|
||||||
|
set[c] = true
|
||||||
|
return set
|
||||||
|
}, {})
|
||||||
|
|
||||||
|
// characters that need to be escaped in RegExp.
|
||||||
|
const reSpecials = charSet('().*{}+?[]^$\\!')
|
||||||
|
|
||||||
|
// characters that indicate we have to add the pattern start
|
||||||
|
const addPatternStartSet = charSet('[.(')
|
||||||
|
|
||||||
|
// normalizes slashes.
|
||||||
|
const slashSplit = /\/+/
|
||||||
|
|
||||||
|
minimatch.filter = (pattern, options = {}) =>
|
||||||
|
(p, i, list) => minimatch(p, pattern, options)
|
||||||
|
|
||||||
|
const ext = (a, b = {}) => {
|
||||||
|
const t = {}
|
||||||
|
Object.keys(a).forEach(k => t[k] = a[k])
|
||||||
|
Object.keys(b).forEach(k => t[k] = b[k])
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
|
||||||
|
minimatch.defaults = def => {
|
||||||
|
if (!def || typeof def !== 'object' || !Object.keys(def).length) {
|
||||||
|
return minimatch
|
||||||
|
}
|
||||||
|
|
||||||
|
const orig = minimatch
|
||||||
|
|
||||||
|
const m = (p, pattern, options) => orig(p, pattern, ext(def, options))
|
||||||
|
m.Minimatch = class Minimatch extends orig.Minimatch {
|
||||||
|
constructor (pattern, options) {
|
||||||
|
super(pattern, ext(def, options))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
m.Minimatch.defaults = options => orig.defaults(ext(def, options)).Minimatch
|
||||||
|
m.filter = (pattern, options) => orig.filter(pattern, ext(def, options))
|
||||||
|
m.defaults = options => orig.defaults(ext(def, options))
|
||||||
|
m.makeRe = (pattern, options) => orig.makeRe(pattern, ext(def, options))
|
||||||
|
m.braceExpand = (pattern, options) => orig.braceExpand(pattern, ext(def, options))
|
||||||
|
m.match = (list, pattern, options) => orig.match(list, pattern, ext(def, options))
|
||||||
|
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
// Brace expansion:
|
||||||
|
// a{b,c}d -> abd acd
|
||||||
|
// a{b,}c -> abc ac
|
||||||
|
// a{0..3}d -> a0d a1d a2d a3d
|
||||||
|
// a{b,c{d,e}f}g -> abg acdfg acefg
|
||||||
|
// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
|
||||||
|
//
|
||||||
|
// Invalid sets are not expanded.
|
||||||
|
// a{2..}b -> a{2..}b
|
||||||
|
// a{b}c -> a{b}c
|
||||||
|
minimatch.braceExpand = (pattern, options) => braceExpand(pattern, options)
|
||||||
|
|
||||||
|
const braceExpand = (pattern, options = {}) => {
|
||||||
|
assertValidPattern(pattern)
|
||||||
|
|
||||||
|
// Thanks to Yeting Li <https://github.com/yetingli> for
|
||||||
|
// improving this regexp to avoid a ReDOS vulnerability.
|
||||||
|
if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
|
||||||
|
// shortcut. no need to expand.
|
||||||
|
return [pattern]
|
||||||
|
}
|
||||||
|
|
||||||
|
return expand(pattern)
|
||||||
|
}
|
||||||
|
|
||||||
|
const MAX_PATTERN_LENGTH = 1024 * 64
|
||||||
|
const assertValidPattern = pattern => {
|
||||||
|
if (typeof pattern !== 'string') {
|
||||||
|
throw new TypeError('invalid pattern')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (pattern.length > MAX_PATTERN_LENGTH) {
|
||||||
|
throw new TypeError('pattern is too long')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// parse a component of the expanded set.
|
||||||
|
// At this point, no pattern may contain "/" in it
|
||||||
|
// so we're going to return a 2d array, where each entry is the full
|
||||||
|
// pattern, split on '/', and then turned into a regular expression.
|
||||||
|
// A regexp is made at the end which joins each array with an
|
||||||
|
// escaped /, and another full one which joins each regexp with |.
|
||||||
|
//
|
||||||
|
// Following the lead of Bash 4.1, note that "**" only has special meaning
|
||||||
|
// when it is the *only* thing in a path portion. Otherwise, any series
|
||||||
|
// of * is equivalent to a single *. Globstar behavior is enabled by
|
||||||
|
// default, and can be disabled by setting options.noglobstar.
|
||||||
|
const SUBPARSE = Symbol('subparse')
|
||||||
|
|
||||||
|
minimatch.makeRe = (pattern, options) =>
|
||||||
|
new Minimatch(pattern, options || {}).makeRe()
|
||||||
|
|
||||||
|
minimatch.match = (list, pattern, options = {}) => {
|
||||||
|
const mm = new Minimatch(pattern, options)
|
||||||
|
list = list.filter(f => mm.match(f))
|
||||||
|
if (mm.options.nonull && !list.length) {
|
||||||
|
list.push(pattern)
|
||||||
|
}
|
||||||
|
return list
|
||||||
|
}
|
||||||
|
|
||||||
|
// replace stuff like \* with *
|
||||||
|
const globUnescape = s => s.replace(/\\(.)/g, '$1')
|
||||||
|
const regExpEscape = s => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
|
||||||
|
|
||||||
|
/**
 * Compiled representation of a single glob pattern.
 *
 * Construction runs the whole pipeline (negation parsing, brace
 * expansion, per-path-segment regexp compilation) and leaves the result
 * in `this.set`: a 2d array where each row is one brace-expanded
 * alternative, and each cell is a literal string, a compiled RegExp, or
 * the GLOBSTAR sentinel for a bare "**" segment.
 *
 * NOTE(review): relies on several names defined elsewhere in this bundle
 * (assertValidPattern, braceExpand, GLOBSTAR, slashSplit, star, qmark,
 * plTypes, reSpecials, addPatternStartSet, twoStarDot, twoStarNoDot,
 * globUnescape, regExpEscape, path, minimatch) — presumably the rest of
 * the vendored minimatch module; verify against the surrounding file.
 */
class Minimatch {
  /**
   * @param {string} pattern - glob pattern; validated for type/length first
   * @param {object} [options] - minimatch options bag (dot, nocase, …)
   */
  constructor (pattern, options) {
    assertValidPattern(pattern)

    if (!options) options = {}

    this.options = options
    this.set = []               // compiled 2d pattern set (filled by make())
    this.pattern = pattern
    this.regexp = null          // lazy cache for makeRe(); false = uncompilable
    this.negate = false         // true when pattern starts with odd number of '!'
    this.comment = false        // true when pattern starts with '#'
    this.empty = false          // true when pattern is ''
    this.partial = !!options.partial

    // make the set of regexps etc.
    this.make()
  }

  // No-op by default; replaced with console.error in make() when
  // options.debug is set.
  debug () {}

  /**
   * Compile this.pattern into this.set.  Steps: comment/empty short-circuit,
   * negation stripping, brace expansion, '/'-splitting, and per-segment
   * regexp compilation via parse().
   */
  make () {
    const pattern = this.pattern
    const options = this.options

    // empty patterns and comments match nothing.
    if (!options.nocomment && pattern.charAt(0) === '#') {
      this.comment = true
      return
    }
    if (!pattern) {
      this.empty = true
      return
    }

    // step 1: figure out negation, etc.
    this.parseNegate()

    // step 2: expand braces
    let set = this.globSet = this.braceExpand()

    if (options.debug) this.debug = (...args) => console.error(...args)

    this.debug(this.pattern, set)

    // step 3: now we have a set, so turn each one into a series of path-portion
    // matching patterns.
    // These will be regexps, except in the case of "**", which is
    // set to the GLOBSTAR object for globstar behavior,
    // and will not contain any / characters
    set = this.globParts = set.map(s => s.split(slashSplit))

    this.debug(this.pattern, set)

    // glob --> regexps
    set = set.map((s, si, set) => s.map(this.parse, this))

    this.debug(this.pattern, set)

    // filter out everything that didn't compile properly.
    // (parse() returns false for invalid segments, e.g. a '/' that
    // survived splitting.)
    set = set.filter(s => s.indexOf(false) === -1)

    this.debug(this.pattern, set)

    this.set = set
  }

  /**
   * Strip leading '!' characters from this.pattern and record the parity
   * in this.negate (an even count of '!' cancels out).  No-op when
   * options.nonegate is set.
   */
  parseNegate () {
    if (this.options.nonegate) return

    const pattern = this.pattern
    let negate = false
    let negateOffset = 0

    for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
      negate = !negate
      negateOffset++
    }

    if (negateOffset) this.pattern = pattern.substr(negateOffset)
    this.negate = negate
  }

  // set partial to true to test if, for example,
  // "/a/b" matches the start of "/*/b/*/d"
  // Partial means, if you run out of file before you run
  // out of pattern, then that's fine, as long as all
  // the parts match.
  /**
   * Match one pre-split file path (array of segments) against one
   * pre-compiled pattern row (array of strings / RegExps / GLOBSTAR).
   * GLOBSTAR segments are handled by recursively "swallowing" file
   * segments one at a time.
   * @param {string[]} file - path split on '/'
   * @param {Array} pattern - one row from this.set
   * @param {boolean} partial - allow the file to run out before the pattern
   * @returns {boolean}
   */
  matchOne (file, pattern, partial) {
    var options = this.options

    this.debug('matchOne',
      { 'this': this, file: file, pattern: pattern })

    this.debug('matchOne', file.length, pattern.length)

    for (var fi = 0,
        pi = 0,
        fl = file.length,
        pl = pattern.length
        ; (fi < fl) && (pi < pl)
        ; fi++, pi++) {
      this.debug('matchOne loop')
      var p = pattern[pi]
      var f = file[fi]

      this.debug(pattern, p, f)

      // should be impossible.
      // some invalid regexp stuff in the set.
      /* istanbul ignore if */
      if (p === false) return false

      if (p === GLOBSTAR) {
        this.debug('GLOBSTAR', [pattern, p, f])

        // "**"
        // a/**/b/**/c would match the following:
        //   a/b/x/y/z/c
        //   a/x/y/z/b/c
        //   a/b/x/b/x/c
        //   a/b/c
        // To do this, take the rest of the pattern after
        // the **, and see if it would match the file remainder.
        // If so, return success.
        // If not, the ** "swallows" a segment, and try again.
        // This is recursively awful.
        //
        // a/**/b/**/c matching a/b/x/y/z/c
        // - a matches a
        // - doublestar
        //   - matchOne(b/x/y/z/c, b/**/c)
        //     - b matches b
        //     - doublestar
        //       - matchOne(x/y/z/c, c) -> no
        //       - matchOne(y/z/c, c) -> no
        //       - matchOne(z/c, c) -> no
        //       - matchOne(c, c) yes, hit
        var fr = fi
        var pr = pi + 1
        if (pr === pl) {
          this.debug('** at the end')
          // a ** at the end will just swallow the rest.
          // We have found a match.
          // however, it will not swallow /.x, unless
          // options.dot is set.
          // . and .. are *never* matched by **, for explosively
          // exponential reasons.
          for (; fi < fl; fi++) {
            if (file[fi] === '.' || file[fi] === '..' ||
              (!options.dot && file[fi].charAt(0) === '.')) return false
          }
          return true
        }

        // ok, let's see if we can swallow whatever we can.
        while (fr < fl) {
          var swallowee = file[fr]

          this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)

          // XXX remove this slice.  Just pass the start index.
          if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
            this.debug('globstar found match!', fr, fl, swallowee)
            // found a match.
            return true
          } else {
            // can't swallow "." or ".." ever.
            // can only swallow ".foo" when explicitly asked.
            if (swallowee === '.' || swallowee === '..' ||
              (!options.dot && swallowee.charAt(0) === '.')) {
              this.debug('dot detected!', file, fr, pattern, pr)
              break
            }

            // ** swallows a segment, and continue.
            this.debug('globstar swallow a segment, and continue')
            fr++
          }
        }

        // no match was found.
        // However, in partial mode, we can't say this is necessarily over.
        // If there's more *pattern* left, then
        /* istanbul ignore if */
        if (partial) {
          // ran out of file
          this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
          if (fr === fl) return true
        }
        return false
      }

      // something other than **
      // non-magic patterns just have to match exactly
      // patterns with magic have been turned into regexps.
      var hit
      if (typeof p === 'string') {
        hit = f === p
        this.debug('string match', p, f, hit)
      } else {
        hit = f.match(p)
        this.debug('pattern match', p, f, hit)
      }

      if (!hit) return false
    }

    // Note: ending in / means that we'll get a final ""
    // at the end of the pattern.  This can only match a
    // corresponding "" at the end of the file.
    // If the file ends in /, then it can only match a
    // a pattern that ends in /, unless the pattern just
    // doesn't have any more for it. But, a/b/ should *not*
    // match "a/b/*", even though "" matches against the
    // [^/]*? pattern, except in partial mode, where it might
    // simply not be reached yet.
    // However, a/b/ should still satisfy a/*

    // now either we fell off the end of the pattern, or we're done.
    if (fi === fl && pi === pl) {
      // ran out of pattern and filename at the same time.
      // an exact hit!
      return true
    } else if (fi === fl) {
      // ran out of file, but still had pattern left.
      // this is ok if we're doing the match as part of
      // a glob fs traversal.
      return partial
    } else /* istanbul ignore else */ if (pi === pl) {
      // ran out of pattern, still have file left.
      // this is only acceptable if we're on the very last
      // empty segment of a file with a trailing slash.
      // a/* should match a/b/
      return (fi === fl - 1) && (file[fi] === '')
    }

    // should be unreachable.
    /* istanbul ignore next */
    throw new Error('wtf?')
  }

  // Delegate to the module-level braceExpand helper for this pattern.
  braceExpand () {
    return braceExpand(this.pattern, this.options)
  }

  /**
   * Compile one '/'-free pattern segment.
   * @param {string} pattern - a single path portion (no '/')
   * @param {symbol} [isSub] - pass SUBPARSE for recursive sub-parses
   * @returns {RegExp|string|false|Array} GLOBSTAR for bare '**'; a plain
   *   (unescaped) string for non-magic segments; a RegExp for magic ones;
   *   false for invalid input; [source, hasMagic] in SUBPARSE mode.
   */
  parse (pattern, isSub) {
    assertValidPattern(pattern)

    const options = this.options

    // shortcuts
    if (pattern === '**') {
      if (!options.noglobstar)
        return GLOBSTAR
      else
        pattern = '*'
    }
    if (pattern === '') return ''

    let re = ''
    let hasMagic = !!options.nocase
    let escaping = false
    // ? => one single character
    const patternListStack = []   // open extglob groups: @( !( +( *( ?(
    const negativeLists = []      // completed !( ) groups, fixed up later
    let stateChar                 // pending ? * + @ ! awaiting a possible '('
    let inClass = false
    let reClassStart = -1
    let classStart = -1
    let cs
    let pl
    let sp
    // . and .. never match anything that doesn't start with .,
    // even when options.dot is set.
    const patternStart = pattern.charAt(0) === '.' ? '' // anything
    // not (start or / followed by . or .. followed by / or end)
    : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
    : '(?!\\.)'

    // Flush a pending stateChar that was not followed by '(' — emit it
    // as plain * / ? / escaped literal instead of an extglob opener.
    const clearStateChar = () => {
      if (stateChar) {
        // we had some state-tracking character
        // that wasn't consumed by this pass.
        switch (stateChar) {
          case '*':
            re += star
            hasMagic = true
            break
          case '?':
            re += qmark
            hasMagic = true
            break
          default:
            re += '\\' + stateChar
            break
        }
        this.debug('clearStateChar %j %j', stateChar, re)
        stateChar = false
      }
    }

    for (let i = 0, c; (i < pattern.length) && (c = pattern.charAt(i)); i++) {
      this.debug('%s\t%s %s %j', pattern, i, re, c)

      // skip over any that are escaped.
      if (escaping) {
        /* istanbul ignore next - completely not allowed, even escaped. */
        if (c === '/') {
          return false
        }

        if (reSpecials[c]) {
          re += '\\'
        }
        re += c
        escaping = false
        continue
      }

      switch (c) {
        /* istanbul ignore next */
        case '/': {
          // Should already be path-split by now.
          return false
        }

        case '\\':
          clearStateChar()
          escaping = true
          continue

        // the various stateChar values
        // for the "extglob" stuff.
        case '?':
        case '*':
        case '+':
        case '@':
        case '!':
          this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c)

          // all of those are literals inside a class, except that
          // the glob [!a] means [^a] in regexp
          if (inClass) {
            this.debug(' in class')
            if (c === '!' && i === classStart + 1) c = '^'
            re += c
            continue
          }

          // if we already have a stateChar, then it means
          // that there was something like ** or +? in there.
          // Handle the stateChar, then proceed with this one.
          this.debug('call clearStateChar %j', stateChar)
          clearStateChar()
          stateChar = c
          // if extglob is disabled, then +(asdf|foo) isn't a thing.
          // just clear the statechar *now*, rather than even diving into
          // the patternList stuff.
          if (options.noext) clearStateChar()
          continue

        case '(':
          if (inClass) {
            re += '('
            continue
          }

          if (!stateChar) {
            re += '\\('
            continue
          }

          patternListStack.push({
            type: stateChar,
            start: i - 1,
            reStart: re.length,
            open: plTypes[stateChar].open,
            close: plTypes[stateChar].close
          })
          // negation is (?:(?!js)[^/]*)
          re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
          this.debug('plType %j %j', stateChar, re)
          stateChar = false
          continue

        case ')':
          if (inClass || !patternListStack.length) {
            re += '\\)'
            continue
          }

          clearStateChar()
          hasMagic = true
          pl = patternListStack.pop()
          // negation is (?:(?!js)[^/]*)
          // The others are (?:<pattern>)<type>
          re += pl.close
          if (pl.type === '!') {
            negativeLists.push(pl)
          }
          pl.reEnd = re.length
          continue

        case '|':
          if (inClass || !patternListStack.length) {
            re += '\\|'
            continue
          }

          clearStateChar()
          re += '|'
          continue

        // these are mostly the same in regexp and glob
        case '[':
          // swallow any state-tracking char before the [
          clearStateChar()

          if (inClass) {
            re += '\\' + c
            continue
          }

          inClass = true
          classStart = i
          reClassStart = re.length
          re += c
          continue

        case ']':
          // a right bracket shall lose its special
          // meaning and represent itself in
          // a bracket expression if it occurs
          // first in the list.  -- POSIX.2 2.8.3.2
          if (i === classStart + 1 || !inClass) {
            re += '\\' + c
            continue
          }

          // handle the case where we left a class open.
          // "[z-a]" is valid, equivalent to "\[z-a\]"
          // split where the last [ was, make sure we don't have
          // an invalid re. if so, re-walk the contents of the
          // would-be class to re-translate any characters that
          // were passed through as-is
          // TODO: It would probably be faster to determine this
          // without a try/catch and a new RegExp, but it's tricky
          // to do safely.  For now, this is safe and works.
          cs = pattern.substring(classStart + 1, i)
          try {
            RegExp('[' + cs + ']')
          } catch (er) {
            // not a valid class!
            sp = this.parse(cs, SUBPARSE)
            re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
            hasMagic = hasMagic || sp[1]
            inClass = false
            continue
          }

          // finish up the class.
          hasMagic = true
          inClass = false
          re += c
          continue

        default:
          // swallow any state char that wasn't consumed
          clearStateChar()

          if (reSpecials[c] && !(c === '^' && inClass)) {
            re += '\\'
          }

          re += c
          break

      } // switch
    } // for

    // handle the case where we left a class open.
    // "[abc" is valid, equivalent to "\[abc"
    if (inClass) {
      // split where the last [ was, and escape it
      // this is a huge pita.  We now have to re-walk
      // the contents of the would-be class to re-translate
      // any characters that were passed through as-is
      cs = pattern.substr(classStart + 1)
      sp = this.parse(cs, SUBPARSE)
      re = re.substr(0, reClassStart) + '\\[' + sp[0]
      hasMagic = hasMagic || sp[1]
    }

    // handle the case where we had a +( thing at the *end*
    // of the pattern.
    // each pattern list stack adds 3 chars, and we need to go through
    // and escape any | chars that were passed through as-is for the regexp.
    // Go through and escape them, taking care not to double-escape any
    // | chars that were already escaped.
    for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
      let tail
      tail = re.slice(pl.reStart + pl.open.length)
      this.debug('setting tail', re, pl)
      // maybe some even number of \, then maybe 1 \, followed by a |
      tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, (_, $1, $2) => {
        /* istanbul ignore else - should already be done */
        if (!$2) {
          // the | isn't already escaped, so escape it.
          $2 = '\\'
        }

        // need to escape all those slashes *again*, without escaping the
        // one that we need for escaping the | character.  As it works out,
        // escaping an even number of slashes can be done by simply repeating
        // it exactly after itself.  That's why this trick works.
        //
        // I am sorry that you have to see this.
        return $1 + $1 + $2 + '|'
      })

      this.debug('tail=%j\n %s', tail, tail, pl, re)
      const t = pl.type === '*' ? star
        : pl.type === '?' ? qmark
        : '\\' + pl.type

      hasMagic = true
      re = re.slice(0, pl.reStart) + t + '\\(' + tail
    }

    // handle trailing things that only matter at the very end.
    clearStateChar()
    if (escaping) {
      // trailing \\
      re += '\\\\'
    }

    // only need to apply the nodot start if the re starts with
    // something that could conceivably capture a dot
    const addPatternStart = addPatternStartSet[re.charAt(0)]

    // Hack to work around lack of negative lookbehind in JS
    // A pattern like: *.!(x).!(y|z) needs to ensure that a name
    // like 'a.xyz.yz' doesn't match.  So, the first negative
    // lookahead, has to look ALL the way ahead, to the end of
    // the pattern.
    for (let n = negativeLists.length - 1; n > -1; n--) {
      const nl = negativeLists[n]

      // NOTE(review): the magic 8 here presumably matches the length of
      // the closing text plTypes['!'].close appends — confirm against the
      // plTypes definition elsewhere in this module.
      const nlBefore = re.slice(0, nl.reStart)
      const nlFirst = re.slice(nl.reStart, nl.reEnd - 8)
      let nlAfter = re.slice(nl.reEnd)
      const nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + nlAfter

      // Handle nested stuff like *(*.js|!(*.json)), where open parens
      // mean that we should *not* include the ) in the bit that is considered
      // "after" the negated section.
      const openParensBefore = nlBefore.split('(').length - 1
      let cleanAfter = nlAfter
      for (let i = 0; i < openParensBefore; i++) {
        cleanAfter = cleanAfter.replace(/\)[+*?]?/, '')
      }
      nlAfter = cleanAfter

      const dollar = nlAfter === '' && isSub !== SUBPARSE ? '$' : ''
      re = nlBefore + nlFirst + nlAfter + dollar + nlLast
    }

    // if the re is not "" at this point, then we need to make sure
    // it doesn't match against an empty path part.
    // Otherwise a/* will match a/, which it should not.
    if (re !== '' && hasMagic) {
      re = '(?=.)' + re
    }

    if (addPatternStart) {
      re = patternStart + re
    }

    // parsing just a piece of a larger pattern.
    if (isSub === SUBPARSE) {
      return [re, hasMagic]
    }

    // skip the regexp for non-magical patterns
    // unescape anything in it, though, so that it'll be
    // an exact match against a file etc.
    if (!hasMagic) {
      return globUnescape(pattern)
    }

    const flags = options.nocase ? 'i' : ''
    try {
      return Object.assign(new RegExp('^' + re + '$', flags), {
        _glob: pattern,
        _src: re,
      })
    } catch (er) /* istanbul ignore next - should be impossible */ {
      // If it was an invalid regular expression, then it can't match
      // anything.  This trick looks for a character after the end of
      // the string, which is of course impossible, except in multi-line
      // mode, but it's not a /m regex.
      return new RegExp('$.')
    }
  }

  /**
   * Build (and cache) a single RegExp matching the whole pattern.
   * Returns false when the pattern set is empty or the combined source
   * is not a valid regexp.
   */
  makeRe () {
    if (this.regexp || this.regexp === false) return this.regexp

    // at this point, this.set is a 2d array of partial
    // pattern strings, or "**".
    //
    // It's better to use .match().  This function shouldn't
    // be used, really, but it's pretty convenient sometimes,
    // when you just want to work with a regex.
    const set = this.set

    if (!set.length) {
      this.regexp = false
      return this.regexp
    }
    const options = this.options

    const twoStar = options.noglobstar ? star
      : options.dot ? twoStarDot
      : twoStarNoDot
    const flags = options.nocase ? 'i' : ''

    // coalesce globstars and regexpify non-globstar patterns
    // if it's the only item, then we just do one twoStar
    // if it's the first, and there are more, prepend (\/|twoStar\/)? to next
    // if it's the last, append (\/twoStar|) to previous
    // if it's in the middle, append (\/|\/twoStar\/) to previous
    // then filter out GLOBSTAR symbols
    let re = set.map(pattern => {
      pattern = pattern.map(p =>
        typeof p === 'string' ? regExpEscape(p)
        : p === GLOBSTAR ? GLOBSTAR
        : p._src
      ).reduce((set, p) => {
        if (!(set[set.length - 1] === GLOBSTAR && p === GLOBSTAR)) {
          set.push(p)
        }
        return set
      }, [])
      pattern.forEach((p, i) => {
        if (p !== GLOBSTAR || pattern[i-1] === GLOBSTAR) {
          return
        }
        if (i === 0) {
          if (pattern.length > 1) {
            pattern[i+1] = '(?:\\\/|' + twoStar + '\\\/)?' + pattern[i+1]
          } else {
            pattern[i] = twoStar
          }
        } else if (i === pattern.length - 1) {
          pattern[i-1] += '(?:\\\/|' + twoStar + ')?'
        } else {
          pattern[i-1] += '(?:\\\/|\\\/' + twoStar + '\\\/)' + pattern[i+1]
          pattern[i+1] = GLOBSTAR
        }
      })
      return pattern.filter(p => p !== GLOBSTAR).join('/')
    }).join('|')

    // must match entire pattern
    // ending in a * or ** will make it less strict.
    re = '^(?:' + re + ')$'

    // can match anything, as long as it's not this.
    if (this.negate) re = '^(?!' + re + ').*$'

    try {
      this.regexp = new RegExp(re, flags)
    } catch (ex) /* istanbul ignore next - should be impossible */ {
      this.regexp = false
    }
    return this.regexp
  }

  /**
   * Test a path string against this pattern.
   * @param {string} f - path to test (Windows separators are normalized)
   * @param {boolean} [partial=this.partial] - allow prefix matches
   * @returns {boolean}
   */
  match (f, partial = this.partial) {
    this.debug('match', f, this.pattern)
    // short-circuit in the case of busted things.
    // comments, etc.
    if (this.comment) return false
    if (this.empty) return f === ''

    if (f === '/' && partial) return true

    const options = this.options

    // windows: need to use /, not \
    if (path.sep !== '/') {
      f = f.split(path.sep).join('/')
    }

    // treat the test path as a set of pathparts.
    f = f.split(slashSplit)
    this.debug(this.pattern, 'split', f)

    // just ONE of the pattern sets in this.set needs to match
    // in order for it to be valid.  If negating, then just one
    // match means that we have failed.
    // Either way, return on the first hit.

    const set = this.set
    this.debug(this.pattern, 'set', set)

    // Find the basename of the path by looking for the last non-empty segment
    let filename
    for (let i = f.length - 1; i >= 0; i--) {
      filename = f[i]
      if (filename) break
    }

    for (let i = 0; i < set.length; i++) {
      const pattern = set[i]
      let file = f
      if (options.matchBase && pattern.length === 1) {
        file = [filename]
      }
      const hit = this.matchOne(file, pattern, partial)
      if (hit) {
        if (options.flipNegate) return true
        return !this.negate
      }
    }

    // didn't get any hits.  this is success if it's a negative
    // pattern, failure otherwise.
    if (options.flipNegate) return false
    return this.negate
  }

  // Return the Minimatch subclass bound to a defaulted options bag,
  // delegating to the module-level minimatch.defaults factory.
  static defaults (def) {
    return minimatch.defaults(def).Minimatch
  }
}
|
||||||
|
|
||||||
|
// Expose the class on the main export, mirroring the npm minimatch API.
minimatch.Minimatch = Minimatch
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user