Mirror of https://github.com/github/codeql-action.git (synced 2026-01-02 04:30:14 +08:00)

Compare commits: codeql-bun ... v2.1.11 (90 commits)
90 commits are included in this comparison. Only the abbreviated SHA1s were captured by the mirror; the Author, Date, and message columns are empty:

a3a6c128d7, 657581e7a8, 657c2f3ff0, 1725087693, e655565390, c38e41c45a, 0658e4b2d6, 54b4854fda, 1fae5bf71b, 533ce91971, ace076b980, 97847a4dde, f8c88ab2dc, 2f58583a1b, 4e0668d05e, c4fdf5fe69, 4f87830a1f, daf6560612, 03e2e3c45f, 3bb6c41212, 38fc5ebb37, a82d691646, ca6773e404, 8dbd96566a, ef73e3bee8, 75b4f1c466, d468c94a69, 7c55012151, f8eea91a7b, 878b64e0ef, 7cf0ed5e3f, b651a677d2, 827fd55c21, dd56e95b46, 3c6dd303a8, 96bc9c36c6, 366e88c2c1, 7b66e72cb7, 06d4e82bd2, 0fb78380f8, b71f20d70f, 8f845425a2, c9882bef2d, 9a6bf18ec4, 0235de0279, a73e506617, b11fe85402, 922dc2b976, 395afb1dd9, ceeddf2638, 06b15c22b1, ed0abc6cac, 193cfa588d, d9e30cb001, ea676e3184, 7c2be06006, 0c3c093eba, 2bf00f719d, 02083c307e, 35ef6a2db3, 5227afabbe, 6ed7f70798, 04f504ca7f, 016ec75b7c, 7502d6e991, cbce00d08d, 0256599547, 72861144fd, 6dd9baf8be, ff8b365e79, eed184a534, c76f0b5b07, bf4ba6945d, d2d14adf3e, 95b49c3e6b, 80771fd2d0, 2b8fdb3f2e, 074853a9a2, ce63ab5d00, e87e2d8201, 8a646279fc, 23b7196b6b, 5b5ed44ab7, faf9d4b499, 9daf1de73c, bce749b10f, fce4a01cd7, bac9320f4f, b3bf557359, f6312f1322
.github/update-release-branch.py (vendored): 55 changed lines
@@ -19,15 +19,19 @@ V1_MODE = 'v1-release'
 # Value of the mode flag for a v2 release
 V2_MODE = 'v2-release'
 
+SOURCE_BRANCH_FOR_MODE = { V1_MODE: 'releases/v2', V2_MODE: 'main' }
+TARGET_BRANCH_FOR_MODE = { V1_MODE: 'releases/v1', V2_MODE: 'releases/v2' }
+
 # Name of the remote
 ORIGIN = 'origin'
 
 # Runs git with the given args and returns the stdout.
-# Raises an error if git does not exit successfully.
-def run_git(*args):
+# Raises an error if git does not exit successfully (unless passed
+# allow_non_zero_exit_code=True).
+def run_git(*args, allow_non_zero_exit_code=False):
   cmd = ['git', *args]
   p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-  if (p.returncode != 0):
+  if not allow_non_zero_exit_code and p.returncode != 0:
     raise Exception('Call to ' + ' '.join(cmd) + ' exited with code ' + str(p.returncode) + ' stderr:' + p.stderr.decode('ascii'))
   return p.stdout.decode('ascii')
 
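As a quick illustration of the new flag (a sketch, not part of the repository): with `allow_non_zero_exit_code=True` a conflicted `git merge` no longer raises, while every other failing git call still does.

```python
import subprocess

# Minimal copy of the helper from the hunk above, so the example is self-contained.
def run_git(*args, allow_non_zero_exit_code=False):
    cmd = ['git', *args]
    p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if not allow_non_zero_exit_code and p.returncode != 0:
        raise Exception('Call to ' + ' '.join(cmd) + ' exited with code ' + str(p.returncode))
    return p.stdout.decode('ascii')

# A conflicted merge exits non-zero but should not abort the release script;
# the caller inspects the working tree afterwards instead of failing here.
run_git('merge', 'origin/main', allow_non_zero_exit_code=True)

# Ordinary failures (unknown revision, bad subcommand, ...) still raise.
run_git('rev-parse', 'HEAD')
```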
@@ -36,7 +40,9 @@ def branch_exists_on_remote(branch_name):
   return run_git('ls-remote', '--heads', ORIGIN, branch_name).strip() != ''
 
 # Opens a PR from the given branch to the target branch
-def open_pr(repo, all_commits, source_branch_short_sha, new_branch_name, source_branch, target_branch, conductor, is_v2_release, labels):
+def open_pr(
+  repo, all_commits, source_branch_short_sha, new_branch_name, source_branch, target_branch,
+  conductor, is_v2_release, labels, conflicted_files):
   # Sort the commits into the pull requests that introduced them,
   # and any commits that don't have a pull request
   pull_requests = []
@@ -81,6 +87,12 @@ def open_pr(repo, all_commits, source_branch_short_sha, new_branch_name, source_
 
   body.append('')
   body.append('Please review the following:')
+  if len(conflicted_files) > 0:
+    body.append(' - [ ] You have added commits to this branch that resolve the merge conflicts ' +
+      'in the following files:')
+    body.extend([f' - [ ] `{file}`' for file in conflicted_files])
+    body.append(' - [ ] Another maintainer has reviewed the additional commits you added to this ' +
+      'branch to resolve the merge conflicts.')
   body.append(' - [ ] The CHANGELOG displays the correct version and date.')
   body.append(' - [ ] The CHANGELOG includes all relevant, user-facing changes since the last release.')
   body.append(' - [ ] There are no unexpected commits being merged into the ' + target_branch + ' branch.')
@@ -191,8 +203,10 @@ def main():
     type=str,
     required=True,
     choices=[V2_MODE, V1_MODE],
-    help=f"Which release to perform. '{V2_MODE}' uses main as the source branch and v2 as the target branch. " +
-      f"'{V1_MODE}' uses v2 as the source branch and v1 as the target branch."
+    help=f"Which release to perform. '{V2_MODE}' uses {SOURCE_BRANCH_FOR_MODE[V2_MODE]} as the source " +
+      f"branch and {TARGET_BRANCH_FOR_MODE[V2_MODE]} as the target branch. " +
+      f"'{V1_MODE}' uses {SOURCE_BRANCH_FOR_MODE[V1_MODE]} as the source branch and " +
+      f"{TARGET_BRANCH_FOR_MODE[V1_MODE]} as the target branch."
   )
   parser.add_argument(
     '--conductor',
@@ -203,14 +217,8 @@ def main():
 
   args = parser.parse_args()
 
-  if args.mode == V2_MODE:
-    source_branch = 'main'
-    target_branch = 'v2'
-  elif args.mode == V1_MODE:
-    source_branch = 'v2'
-    target_branch = 'v1'
-  else:
-    raise ValueError(f"Unexpected value for release mode: '{args.mode}'")
+  source_branch = SOURCE_BRANCH_FOR_MODE[args.mode]
+  target_branch = TARGET_BRANCH_FOR_MODE[args.mode]
 
   repo = Github(args.github_token).get_repo(args.repository_nwo)
   version = get_current_version()
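A tiny standalone check of the table-driven branch selection introduced above (values copied from the dictionaries in this diff): each mode now resolves to its source and target branch by lookup rather than by an if/elif chain.

```python
V1_MODE = 'v1-release'
V2_MODE = 'v2-release'

SOURCE_BRANCH_FOR_MODE = {V1_MODE: 'releases/v2', V2_MODE: 'main'}
TARGET_BRANCH_FOR_MODE = {V1_MODE: 'releases/v1', V2_MODE: 'releases/v2'}

for mode in (V2_MODE, V1_MODE):
    # v2-release: main -> releases/v2; v1-release: releases/v2 -> releases/v1
    print(f"{mode}: {SOURCE_BRANCH_FOR_MODE[mode]} -> {TARGET_BRANCH_FOR_MODE[mode]}")
```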
@@ -246,10 +254,15 @@ def main():
   # Create the new branch and push it to the remote
   print('Creating branch ' + new_branch_name)
 
+  # The process of creating the v1 release can run into merge conflicts. We commit the unresolved
+  # conflicts so a maintainer can easily resolve them (vs erroring and requiring maintainers to
+  # reconstruct the release manually)
+  conflicted_files = []
+
   if args.mode == V1_MODE:
-    # If we're performing a backport, start from the v1 branch
-    print(f'Creating {new_branch_name} from the {ORIGIN}/v1 branch')
-    run_git('checkout', '-b', new_branch_name, f'{ORIGIN}/v1')
+    # If we're performing a backport, start from the target branch
+    print(f'Creating {new_branch_name} from the {ORIGIN}/{target_branch} branch')
+    run_git('checkout', '-b', new_branch_name, f'{ORIGIN}/{target_branch}')
 
     # Revert the commit that we made as part of the last release that updated the version number and
     # changelog to refer to 1.x.x variants. This avoids merge conflicts in the changelog and
@@ -274,7 +287,12 @@ def main():
       print('  Nothing to revert.')
 
     print(f'Merging {ORIGIN}/{source_branch} into the release prep branch')
-    run_git('merge', f'{ORIGIN}/{source_branch}', '--no-edit')
+    # Commit any conflicts (see the comment for `conflicted_files`)
+    run_git('merge', f'{ORIGIN}/{source_branch}', allow_non_zero_exit_code=True)
+    conflicted_files = run_git('diff', '--name-only', '--diff-filter', 'U').splitlines()
+    if len(conflicted_files) > 0:
+      run_git('add', '.')
+      run_git('commit', '--no-edit')
 
     # Migrate the package version number from a v2 version number to a v1 version number
     print(f'Setting version number to {version}')
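For reference, `git diff --name-only --diff-filter U` lists the paths that are still unmerged after a conflicted merge. A rough standalone sketch of the detect-and-commit step above (assuming it runs inside a checkout with a merge in progress):

```python
import subprocess

def unmerged_files():
    # `--diff-filter U` selects unmerged paths, i.e. files whose conflicts are unresolved.
    out = subprocess.run(
        ['git', 'diff', '--name-only', '--diff-filter', 'U'],
        stdout=subprocess.PIPE, check=True,
    )
    return out.stdout.decode().splitlines()

conflicted_files = unmerged_files()
if conflicted_files:
    # Mirror the release script: stage everything, conflict markers included, and commit,
    # so a maintainer can resolve the conflicts on the release prep branch later.
    subprocess.run(['git', 'add', '.'], check=True)
    subprocess.run(['git', 'commit', '--no-edit'], check=True)
```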
@@ -317,6 +335,7 @@ def main():
     conductor=args.conductor,
     is_v2_release=args.mode == V2_MODE,
     labels=['Update dependencies'] if args.mode == V1_MODE else [],
+    conflicted_files=conflicted_files
   )
 
 if __name__ == '__main__':
.github/workflows/__analyze-ref-input.yml (generated, vendored): 4 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
.github/workflows/__debug-artifacts.yml (generated, vendored): 4 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
.github/workflows/__extractor-ram-threads.yml (generated, vendored): 4 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
.github/workflows/__go-custom-queries.yml (generated, vendored): 4 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
.github/workflows/__go-custom-tracing-autobuild.yml (generated, vendored): 4 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
.github/workflows/__go-custom-tracing.yml (generated, vendored): 4 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
.github/workflows/__javascript-source-root.yml (generated, vendored): 4 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
.github/workflows/__ml-powered-queries.yml (generated, vendored): 24 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
@@ -82,10 +82,20 @@ jobs:
         retention-days: 7
 
     - name: Check results
+      # Running ML-powered queries on Windows requires CodeQL CLI 2.9.0+. We don't run these checks
+      # against Windows and `cached` while CodeQL CLI 2.9.0 makes its way into `cached` to avoid the
+      # test starting to fail when the cached CodeQL Bundle gets updated. Once the CodeQL Bundle
+      # containing CodeQL CLI 2.9.0 has been fully released, we can drop this line and start running
+      # these checks on Windows and `cached`.
+      if: matrix.os != 'windows-latest' || matrix.version != 'cached'
       env:
-        IS_WINDOWS: ${{ matrix.os == 'windows-latest' }}
+        # Running on Windows requires CodeQL CLI 2.9.0+, which has so far only made it to 'latest'.
+        SHOULD_RUN_ML_POWERED_QUERIES: ${{ matrix.os != 'windows-latest' || matrix.version
+          == 'latest' || matrix.version == 'nightly-latest' }}
       shell: bash
       run: |
+        echo "Expecting ML-powered queries to be run: ${SHOULD_RUN_ML_POWERED_QUERIES}"
+
         cd "$RUNNER_TEMP/results"
         # We should run at least the ML-powered queries in `expected_rules`.
         expected_rules="js/ml-powered/nosql-injection js/ml-powered/path-injection js/ml-powered/sql-injection js/ml-powered/xss"
@@ -94,10 +104,10 @@ jobs:
           found_rule=$(jq --arg rule "${rule}" '[.runs[0].tool.extensions[].rules | select(. != null) |
             flatten | .[].id] | any(. == $rule)' javascript.sarif)
           echo "Did find rule '${rule}': ${found_rule}"
-          if [[ "${found_rule}" != "true" && "${IS_WINDOWS}" != "true" ]]; then
+          if [[ "${found_rule}" != "true" && "${SHOULD_RUN_ML_POWERED_QUERIES}" == "true" ]]; then
             echo "Expected SARIF output to contain rule '${rule}', but found no such rule."
             exit 1
-          elif [[ "${found_rule}" == "true" && "${IS_WINDOWS}" == "true" ]]; then
+          elif [[ "${found_rule}" == "true" && "${SHOULD_RUN_ML_POWERED_QUERIES}" != "true" ]]; then
             echo "Found rule '${rule}' in the SARIF output which shouldn't have been part of the analysis."
             exit 1
           fi
@@ -108,10 +118,10 @@ jobs:
           select(.properties.score != null and (.rule.id | startswith("js/ml-powered/")))] | length' \
           javascript.sarif)
         echo "Found ${num_alerts} alerts from ML-powered queries.";
-        if [[ "${num_alerts}" -eq 0 && "${IS_WINDOWS}" != "true" ]]; then
+        if [[ "${num_alerts}" -eq 0 && "${SHOULD_RUN_ML_POWERED_QUERIES}" == "true" ]]; then
           echo "Expected to find at least one alert from an ML-powered query but found ${num_alerts}."
           exit 1
-        elif [[ "${num_alerts}" -ne 0 && "${IS_WINDOWS}" == "true" ]]; then
+        elif [[ "${num_alerts}" -ne 0 && "${SHOULD_RUN_ML_POWERED_QUERIES}" != "true" ]]; then
           echo "Expected not to find any alerts from an ML-powered query but found ${num_alerts}."
           exit 1
         fi
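The jq assertions above can be hard to read; a rough Python equivalent of the per-rule check (a sketch only, assuming a `javascript.sarif` file in the current directory and the same `SHOULD_RUN_ML_POWERED_QUERIES` decision computed from the matrix) looks like this:

```python
import json

SHOULD_RUN_ML_POWERED_QUERIES = True  # stands in for the matrix.os / matrix.version decision
EXPECTED_RULES = [
    "js/ml-powered/nosql-injection",
    "js/ml-powered/path-injection",
    "js/ml-powered/sql-injection",
    "js/ml-powered/xss",
]

with open("javascript.sarif") as f:
    sarif = json.load(f)

# Collect every rule id declared by any tool extension in the first run,
# mirroring `.runs[0].tool.extensions[].rules | select(. != null) | flatten | .[].id`.
rule_ids = {
    rule["id"]
    for extension in sarif["runs"][0]["tool"]["extensions"]
    for rule in (extension.get("rules") or [])
}

for rule in EXPECTED_RULES:
    found = rule in rule_ids
    if SHOULD_RUN_ML_POWERED_QUERIES and not found:
        raise SystemExit(f"Expected SARIF output to contain rule {rule!r}, but found no such rule.")
    if not SHOULD_RUN_ML_POWERED_QUERIES and found:
        raise SystemExit(f"Rule {rule!r} shouldn't have been part of the analysis.")
```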
.github/workflows/__multi-language-autodetect.yml (generated, vendored): 4 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
.github/workflows/__packaging-config-inputs-js.yml (generated, vendored): 34 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
@@ -26,9 +26,27 @@ jobs:
     matrix:
       include:
       - os: ubuntu-latest
-        version: nightly-20210831
+        version: latest
       - os: macos-latest
-        version: nightly-20210831
+        version: latest
+      - os: windows-2019
+        version: latest
+      - os: windows-2022
+        version: latest
+      - os: ubuntu-latest
+        version: cached
+      - os: macos-latest
+        version: cached
+      - os: windows-2019
+        version: cached
+      - os: ubuntu-latest
+        version: nightly-latest
+      - os: macos-latest
+        version: nightly-latest
+      - os: windows-2019
+        version: nightly-latest
+      - os: windows-2022
+        version: nightly-latest
     name: 'Packaging: Config and input'
     timeout-minutes: 45
     runs-on: ${{ matrix.os }}
@@ -43,7 +61,7 @@ jobs:
     - uses: ./../action/init
       with:
         config-file: .github/codeql/codeql-config-packaging3.yml
-        packs: +dsp-testing/codeql-pack1@0.1.0
+        packs: +dsp-testing/codeql-pack1@1.0.0
         languages: javascript
         tools: ${{ steps.prepare-test.outputs.tools-url }}
     - name: Build code
@@ -58,11 +76,11 @@ jobs:
       shell: bash
       run: |
         cd "$RUNNER_TEMP/results"
-        # We should have 3 hits from these rules
-        EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"
+        # We should have 4 hits from these rules
+        EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
 
         # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
-        RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
+        RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
         echo "Found matching rules '$RULES'"
         if [ "$RULES" != "$EXPECTED_RULES" ]; then
           echo "Did not match expected rules '$EXPECTED_RULES'."
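The rule comparison in this check (and the identical checks in the other packaging workflows below) boils down to "the sorted list of result rule ids equals the expected list"; a minimal Python rendering of the same jq/sort/tr pipeline, assuming a local `javascript.sarif`, is:

```python
import json

EXPECTED_RULES = [
    "javascript/example/empty-or-one-block",
    "javascript/example/empty-or-one-block",
    "javascript/example/other-query-block",
    "javascript/example/two-block",
]

with open("javascript.sarif") as f:
    results = json.load(f)["runs"][0]["results"]

# jq -r '.runs[0].results[].ruleId' | sort, without the tr/xargs whitespace cleanup
# (the `tr "\n\r"` change in the diff exists to strip Windows carriage returns).
rules = sorted(result["ruleId"] for result in results)

if rules != EXPECTED_RULES:
    raise SystemExit(f"Did not match expected rules {EXPECTED_RULES!r}, got {rules!r}")
```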
.github/workflows/__packaging-config-js.yml (generated, vendored): 32 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
@@ -26,9 +26,27 @@ jobs:
     matrix:
       include:
       - os: ubuntu-latest
-        version: nightly-20210831
+        version: latest
      - os: macos-latest
-        version: nightly-20210831
+        version: latest
+      - os: windows-2019
+        version: latest
+      - os: windows-2022
+        version: latest
+      - os: ubuntu-latest
+        version: cached
+      - os: macos-latest
+        version: cached
+      - os: windows-2019
+        version: cached
+      - os: ubuntu-latest
+        version: nightly-latest
+      - os: macos-latest
+        version: nightly-latest
+      - os: windows-2019
+        version: nightly-latest
+      - os: windows-2022
+        version: nightly-latest
     name: 'Packaging: Config file'
     timeout-minutes: 45
     runs-on: ${{ matrix.os }}
@@ -57,11 +75,11 @@ jobs:
       shell: bash
       run: |
         cd "$RUNNER_TEMP/results"
-        # We should have 3 hits from these rules
-        EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"
+        # We should have 4 hits from these rules
+        EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
 
         # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
-        RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
+        RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
         echo "Found matching rules '$RULES'"
         if [ "$RULES" != "$EXPECTED_RULES" ]; then
           echo "Did not match expected rules '$EXPECTED_RULES'."
.github/workflows/__packaging-inputs-js.yml (generated, vendored): 34 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
@@ -26,9 +26,27 @@ jobs:
     matrix:
       include:
       - os: ubuntu-latest
-        version: nightly-20210831
+        version: latest
       - os: macos-latest
-        version: nightly-20210831
+        version: latest
+      - os: windows-2019
+        version: latest
+      - os: windows-2022
+        version: latest
+      - os: ubuntu-latest
+        version: cached
+      - os: macos-latest
+        version: cached
+      - os: windows-2019
+        version: cached
+      - os: ubuntu-latest
+        version: nightly-latest
+      - os: macos-latest
+        version: nightly-latest
+      - os: windows-2019
+        version: nightly-latest
+      - os: windows-2022
+        version: nightly-latest
     name: 'Packaging: Action input'
     timeout-minutes: 45
     runs-on: ${{ matrix.os }}
@@ -44,7 +62,7 @@ jobs:
       with:
         config-file: .github/codeql/codeql-config-packaging2.yml
         languages: javascript
-        packs: dsp-testing/codeql-pack1@0.1.0, dsp-testing/codeql-pack2
+        packs: dsp-testing/codeql-pack1@1.0.0, dsp-testing/codeql-pack2, dsp-testing/codeql-pack3:other-query.ql
         tools: ${{ steps.prepare-test.outputs.tools-url }}
     - name: Build code
       shell: bash
@@ -58,11 +76,11 @@ jobs:
       shell: bash
       run: |
         cd "$RUNNER_TEMP/results"
-        # We should have 3 hits from these rules
-        EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"
+        # We should have 4 hits from these rules
+        EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
 
         # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
-        RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
+        RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
         echo "Found matching rules '$RULES'"
         if [ "$RULES" != "$EXPECTED_RULES" ]; then
           echo "Did not match expected rules '$EXPECTED_RULES'."
.github/workflows/__remote-config.yml (generated, vendored): 4 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
.github/workflows/__rubocop-multi-language.yml (generated, vendored): 4 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
.github/workflows/__split-workflow.yml (generated, vendored): 24 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
@@ -26,9 +26,17 @@ jobs:
     matrix:
       include:
       - os: ubuntu-latest
-        version: nightly-20210831
+        version: latest
       - os: macos-latest
-        version: nightly-20210831
+        version: latest
+      - os: ubuntu-latest
+        version: cached
+      - os: macos-latest
+        version: cached
+      - os: ubuntu-latest
+        version: nightly-latest
+      - os: macos-latest
+        version: nightly-latest
     name: Split workflow
     timeout-minutes: 45
     runs-on: ${{ matrix.os }}
@@ -43,7 +51,7 @@ jobs:
     - uses: ./../action/init
       with:
         config-file: .github/codeql/codeql-config-packaging3.yml
-        packs: +dsp-testing/codeql-pack1@0.1.0
+        packs: +dsp-testing/codeql-pack1@1.0.0
         languages: javascript
         tools: ${{ steps.prepare-test.outputs.tools-url }}
     - name: Build code
@@ -72,11 +80,11 @@ jobs:
       shell: bash
       run: |
         cd "$RUNNER_TEMP/results"
-        # We should have 3 hits from these rules
-        EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/two-block"
+        # We should have 4 hits from these rules
+        EXPECTED_RULES="javascript/example/empty-or-one-block javascript/example/empty-or-one-block javascript/example/other-query-block javascript/example/two-block"
 
         # use tr to replace newlines with spaces and xargs to trim leading and trailing whitespace
-        RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n" " " | xargs)"
+        RULES="$(cat javascript.sarif | jq -r '.runs[0].results[].ruleId' | sort | tr "\n\r" " " | xargs)"
         echo "Found matching rules '$RULES'"
         if [ "$RULES" != "$EXPECTED_RULES" ]; then
           echo "Did not match expected rules '$EXPECTED_RULES'."
.github/workflows/__test-autobuild-working-dir.yml (generated, vendored): 4 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
.github/workflows/__test-local-codeql.yml (generated, vendored): 4 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
.github/workflows/__test-proxy.yml (generated, vendored): 4 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
.github/workflows/__test-ruby.yml (generated, vendored): 4 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
.github/workflows/__unset-environment.yml (generated, vendored): 4 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
.github/workflows/__upload-ref-sha-input.yml (generated, vendored): 4 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
.github/workflows/__with-checkout-path.yml (generated, vendored): 4 changed lines

@@ -11,8 +11,8 @@ on:
   push:
     branches:
     - main
-    - v1
-    - v2
+    - releases/v1
+    - releases/v2
   pull_request:
     types:
     - opened
.github/workflows/check-for-conflicts.yml (vendored, new file): 31 added lines

@@ -0,0 +1,31 @@
+# Checks for any conflict markers created by git. This check is primarily intended to validate that
+# any merge conflicts in the v2 -> v1 backport PR are fixed before the PR is merged.
+name: Check for conflicts
+
+on:
+  pull_request:
+    branches: [main, v1, v2]
+    # Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
+    # by other workflows.
+    types: [opened, synchronize, reopened, ready_for_review]
+
+jobs:
+  check-for-conflicts:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Check for conflicts
+        run: |
+          # Use `|| true` since grep returns exit code 1 if there are no matches, and we don't want
+          # this to fail the workflow.
+          FILES_WITH_CONFLICTS=$(grep --extended-regexp --ignore-case --line-number --recursive \
+            '^(<<<<<<<|>>>>>>>)' . || true)
+          if [[ "${FILES_WITH_CONFLICTS}" ]]; then
+            echo "Fail: Found merge conflict markers in the following files:"
+            echo ""
+            echo "${FILES_WITH_CONFLICTS}"
+            exit 1
+          else
+            echo "Success: Found no merge conflict markers."
+          fi
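For reference, the grep in this new workflow flags any line that begins with `<<<<<<<` or `>>>>>>>`. An equivalent recursive scan sketched in Python (a hypothetical helper, not part of the repository) would be:

```python
import re
from pathlib import Path

# Same pattern as the workflow's grep: conflict markers at the start of a line.
MARKER = re.compile(r"^(<<<<<<<|>>>>>>>)", re.IGNORECASE)

def files_with_conflict_markers(root="."):
    hits = []
    for path in Path(root).rglob("*"):
        if not path.is_file():
            continue
        try:
            text = path.read_text(errors="ignore")
        except OSError:
            continue
        for lineno, line in enumerate(text.splitlines(), start=1):
            if MARKER.match(line):
                hits.append(f"{path}:{lineno}:{line}")
    return hits

conflicts = files_with_conflict_markers()
if conflicts:
    raise SystemExit("Fail: Found merge conflict markers:\n" + "\n".join(conflicts))
print("Success: Found no merge conflict markers.")
```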
.github/workflows/codeql.yml (vendored): 4 changed lines

@@ -2,9 +2,9 @@ name: "CodeQL action"
 
 on:
   push:
-    branches: [main, v1, v2]
+    branches: [main, releases/v1, releases/v2]
   pull_request:
-    branches: [main, v1, v2]
+    branches: [main, releases/v1, releases/v2]
     # Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
     # by other workflows.
     types: [opened, synchronize, reopened, ready_for_review]
.github/workflows/post-release-mergeback.yml (vendored): 76 changed lines

@@ -1,7 +1,8 @@
-# This workflow runs after a release of the action.
-# It merges any changes from the release back into the
-# main branch. Typically, this is just a single commit
-# that updates the changelog.
+# This workflow runs after a release of the action. For v2 releases, it merges any changes from the
+# release back into the main branch. Typically, this is just a single commit that updates the
+# changelog. For v2 and v1 releases, it then (a) tags the merge commit on the release branch that
+# represents the new release with an `vx.y.z` tag and (b) updates the `vx` tag to refer to this
+# commit.
 name: Tag release and merge back
 
 on:
@@ -14,8 +15,8 @@ on:
 
   push:
     branches:
-      - v1
-      - v2
+      - releases/v1
+      - releases/v2
 
 jobs:
   merge-back:
@@ -32,7 +33,7 @@ jobs:
       - name: Dump GitHub context
         env:
           GITHUB_CONTEXT: '${{ toJson(github) }}'
-        run: echo "$GITHUB_CONTEXT"
+        run: echo "${GITHUB_CONTEXT}"
 
       - uses: actions/checkout@v3
       - uses: actions/setup-node@v3
@@ -46,25 +47,25 @@ jobs:
         id: getVersion
         run: |
           VERSION="v$(jq '.version' -r 'package.json')"
-          SHORT_SHA="${GITHUB_SHA:0:8}"
-          echo "::set-output name=version::$VERSION"
-          NEW_BRANCH="mergeback/${VERSION}-to-${BASE_BRANCH}-${SHORT_SHA}"
-          echo "::set-output name=newBranch::$NEW_BRANCH"
+          echo "::set-output name=version::${VERSION}"
+          short_sha="${GITHUB_SHA:0:8}"
+          NEW_BRANCH="mergeback/${VERSION}-to-${BASE_BRANCH}-${short_sha}"
+          echo "::set-output name=newBranch::${NEW_BRANCH}"
 
 
      - name: Dump branches
        env:
          NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
        run: |
-          echo "BASE_BRANCH $BASE_BRANCH"
-          echo "HEAD_BRANCH $HEAD_BRANCH"
-          echo "NEW_BRANCH $NEW_BRANCH"
+          echo "BASE_BRANCH ${BASE_BRANCH}"
+          echo "HEAD_BRANCH ${HEAD_BRANCH}"
+          echo "NEW_BRANCH ${NEW_BRANCH}"
 
      - name: Create mergeback branch
        env:
          NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
        run: |
-          git checkout -b "$NEW_BRANCH"
+          git checkout -b "${NEW_BRANCH}"
 
      - name: Check for tag
        id: check
@@ -72,13 +73,13 @@ jobs:
          VERSION: "${{ steps.getVersion.outputs.version }}"
        run: |
          set +e # don't fail on an errored command
-          git ls-remote --tags origin | grep "$VERSION"
-          EXISTS="$?"
-          if [ "$EXISTS" -eq 0 ]; then
-            echo "Tag $TAG exists. Not going to re-release."
+          git ls-remote --tags origin | grep "${VERSION}"
+          exists="$?"
+          if [ "${exists}" -eq 0 ]; then
+            echo "Tag ${VERSION} exists. Not going to re-release."
            echo "::set-output name=exists::true"
          else
-            echo "Tag $TAG does not exist yet."
+            echo "Tag ${VERSION} does not exist yet."
          fi
 
      # we didn't tag the release during the update-release-branch workflow because the
@@ -89,20 +90,31 @@ jobs:
        env:
          VERSION: ${{ steps.getVersion.outputs.version }}
        run: |
-          git tag -a "$VERSION" -m "$VERSION"
-          git fetch --unshallow # unshallow the repo in order to allow pushes
-          git push origin --follow-tags "$VERSION"
+          # Unshallow the repo in order to allow pushes
+          git fetch --unshallow
+          # Create the `vx.y.z` tag
+          git tag --annotate "${VERSION}" --message "${VERSION}"
+          # Update the `vx` tag
+          major_version_tag=$(cut -d '.' -f1 <<< "${VERSION}")
+          # Use `--force` to overwrite the major version tag
+          git tag --annotate "${major_version_tag}" --message "${major_version_tag}" --force
+          # Push the tags, using:
+          # - `--atomic` to make sure we either update both tags or neither (an intermediate state,
+          #   e.g. where we update the v2.x.y tag on the remote but not the v2 tag, could result in
+          #   unwanted Dependabot updates, e.g. from v2 to v2.x.y)
+          # - `--force` since we're overwriting the `vx` tag
+          git push origin --atomic --force refs/tags/"${VERSION}" refs/tags/"${major_version_tag}"
 
      - name: Create mergeback branch
-        if: steps.check.outputs.exists != 'true' && contains(github.ref, 'v2')
+        if: steps.check.outputs.exists != 'true' && contains(github.ref, 'releases/v2')
        env:
          VERSION: "${{ steps.getVersion.outputs.version }}"
          NEW_BRANCH: "${{ steps.getVersion.outputs.newBranch }}"
          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
        run: |
          set -exu
-          PR_TITLE="Mergeback $VERSION $HEAD_BRANCH into $BASE_BRANCH"
-          PR_BODY="Updates version and changelog."
+          pr_title="Mergeback ${VERSION} ${HEAD_BRANCH} into ${BASE_BRANCH}"
+          pr_body="Updates version and changelog."
 
          # Update the version number ready for the next release
          npm version patch --no-git-tag-version
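To make the tagging step above concrete: given a release version such as `v2.1.11`, the major tag is just the text before the first dot, and both refs are pushed in one forced, atomic update. A rough Python sketch of the same steps (assuming the version is already known and the repository has been unshallowed):

```python
import subprocess

VERSION = "v2.1.11"  # example value; the workflow reads it from package.json
major_version_tag = VERSION.split(".")[0]  # same as `cut -d '.' -f1`, i.e. "v2"

# Create the vx.y.z tag for this release.
subprocess.run(["git", "tag", "--annotate", VERSION, "--message", VERSION], check=True)

# --force because the major tag (v2) already exists and must move to the new release.
subprocess.run(
    ["git", "tag", "--annotate", major_version_tag, "--message", major_version_tag, "--force"],
    check=True,
)

# --atomic: either both tags update on the remote or neither does, so consumers never
# see a v2 tag that disagrees with the latest v2.x.y tag.
subprocess.run(
    ["git", "push", "origin", "--atomic", "--force",
     f"refs/tags/{VERSION}", f"refs/tags/{major_version_tag}"],
    check=True,
)
```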
@@ -110,16 +122,16 @@ jobs:
          # Update the changelog
          perl -i -pe 's/^/## \[UNRELEASED\]\n\nNo user facing changes.\n\n/ if($.==3)' CHANGELOG.md
          git add .
-          git commit -m "Update changelog and version after $VERSION"
+          git commit -m "Update changelog and version after ${VERSION}"
 
-          git push origin "$NEW_BRANCH"
+          git push origin "${NEW_BRANCH}"
 
          # PR checks won't be triggered on PRs created by Actions. Therefore mark the PR as draft
          # so that a maintainer can take the PR out of draft, thereby triggering the PR checks.
          gh pr create \
-            --head "$NEW_BRANCH" \
-            --base "$BASE_BRANCH" \
-            --title "$PR_TITLE" \
+            --head "${NEW_BRANCH}" \
+            --base "${BASE_BRANCH}" \
+            --title "${pr_title}" \
            --label "Update dependencies" \
-            --body "$PR_BODY" \
+            --body "${pr_body}" \
            --draft
.github/workflows/pr-checks.yml (vendored): 2 changed lines

@@ -2,7 +2,7 @@ name: PR Checks (Basic Checks and Runner)
 
 on:
   push:
-    branches: [main, v1, v2]
+    branches: [main, releases/v1, releases/v2]
   pull_request:
     # Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
     # by other workflows.
.github/workflows/python-deps.yml (vendored): 2 changed lines

@@ -2,7 +2,7 @@ name: Test Python Package Installation on Linux and Mac
 
 on:
   push:
-    branches: [main, v1, v2]
+    branches: [main, releases/v1, releases/v2]
   pull_request:
     # Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened
     # by other workflows.
.github/workflows/script/update-required-checks.sh (vendored, new executable file): 35 added lines

@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+# Update the required checks based on the current branch.
+# Typically, this will be main.
+
+if [ -z "$GITHUB_TOKEN" ]; then
+    echo "Failed: No GitHub token found. This script requires admin access to `github/codeql-action`."
+    exit 1
+fi
+
+if [ "$#" -eq 1 ]; then
+    # If we were passed an argument, pass it as a query to fzf
+    GITHUB_SHA="$@"
+elif [ "$#" -gt 1 ]; then
+    echo "Usage: $0 [SHA]"
+    echo "Update the required checks based on the SHA, or main."
+elif [ -z "$GITHUB_SHA" ]; then
+    # If we don't have a SHA, use main
+    GITHUB_SHA="$(git rev-parse main)"
+fi
+
+echo "Getting checks for $GITHUB_SHA"
+
+# Ignore any checks with "https://", CodeQL, LGTM, and Update checks.
+CHECKS="$(gh api repos/github/codeql-action/commits/${GITHUB_SHA}/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or contains("Update") | not)] | sort')"
+
+echo "$CHECKS" | jq
+
+echo "{\"contexts\": ${CHECKS}}" > checks.json
+
+for BRANCH in main releases/v2 releases/v1; do
+    echo "Updating $BRANCH"
+    gh api --silent -X "PATCH" "repos/github/codeql-action/branches/$BRANCH/protection/required_status_checks" --input checks.json
+done
+
+rm checks.json
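The jq filter in `update-required-checks.sh` keeps only check names that are not URLs and are not the CodeQL, LGTM.com, or "Update ..." checks. A small Python version of just that filtering step, operating on a list of check-run names already fetched from the API (the sample names below are hypothetical):

```python
def required_status_checks(check_run_names):
    # Drop URL-style names plus the CodeQL, LGTM.com and "Update ..." checks,
    # mirroring the jq `select(... | not)` expression in the script above.
    return sorted(
        name
        for name in check_run_names
        if "https://" not in name
        and name != "CodeQL"
        and name != "LGTM.com"
        and "Update" not in name
    )

print(required_status_checks([
    "PR Check - Example job",           # kept
    "CodeQL",                           # dropped
    "LGTM.com",                         # dropped
    "Update dependencies",              # dropped
    "https://example.com/external",     # dropped
]))
```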
.github/workflows/update-release-branch.yml (vendored): 2 changed lines

@@ -7,7 +7,7 @@ on:
   # When the v2 release is complete, this workflow will open a PR to update the v1 release branch.
   push:
     branches:
-      - v2
+      - releases/v2
 
 jobs:
   update:
CHANGELOG.md: 13 changed lines

@@ -1,9 +1,20 @@
 # CodeQL Action Changelog
 
-## [UNRELEASED]
+## 2.1.11 - 17 May 2022
+
+- Update default CodeQL bundle version to 2.9.2. [#1074](https://github.com/github/codeql-action/pull/1074)
+
+## 2.1.10 - 10 May 2022
+
+- Update default CodeQL bundle version to 2.9.1. [#1056](https://github.com/github/codeql-action/pull/1056)
+- When `wait-for-processing` is enabled, the workflow will now fail if there were any errors that occurred during processing of the analysis results.
+
+## 2.1.9 - 27 Apr 2022
 
 - Add `working-directory` input to the `autobuild` action. [#1024](https://github.com/github/codeql-action/pull/1024)
 - The `analyze` and `upload-sarif` actions will now wait up to 2 minutes for processing to complete after they have uploaded the results so they can report any processing errors that occurred. This behavior can be disabled by setting the `wait-for-processing` action input to `"false"`. [#1007](https://github.com/github/codeql-action/pull/1007)
+- Update default CodeQL bundle version to 2.9.0.
+- Fix a bug where [status reporting fails on Windows](https://github.com/github/codeql-action/issues/1041). [#1042](https://github.com/github/codeql-action/pull/1042)
 
 ## 2.1.8 - 08 Apr 2022
 
@@ -61,41 +61,30 @@ Here are a few things you can do that will increase the likelihood of your pull request being accepted:
 ## Releasing (write access required)
 
 1. The first step of releasing a new version of the `codeql-action` is running the "Update release branch" workflow.
-   This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `v2` release branch.
+   This workflow goes through the pull requests that have been merged to `main` since the last release, creates a changelog, then opens a pull request to merge the changes since the last release into the `releases/v2` release branch.
 
    You can start a release by triggering this workflow via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
-1. The workflow run will open a pull request titled "Merge main into v2". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
+1. The workflow run will open a pull request titled "Merge main into releases/v2". Mark the pull request as [ready for review](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#marking-a-pull-request-as-ready-for-review) to trigger the PR checks.
 1. Review the checklist items in the pull request description.
    Once you've checked off all but the last two of these, approve the PR and automerge it.
-1. When the "Merge main into v2" pull request is merged into the `v2` branch, the "Tag release and merge back" workflow will create a mergeback PR.
-   This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into v2" pull request, and bumps the patch version of the CodeQL Action.
+1. When the "Merge main into releases/v2" pull request is merged into the `releases/v2` branch, the "Tag release and merge back" workflow will create a mergeback PR.
+   This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into releases/v2" pull request, and bumps the patch version of the CodeQL Action.
 
    Approve the mergeback PR and automerge it.
-1. When the "Merge main into v2" pull request is merged into the `v2` branch, the "Update release branch" workflow will create a "Merge v2 into v1" pull request to merge the changes since the last release into the `v1` release branch.
-   This ensures we keep both the `v1` and `v2` release branches up to date and fully supported.
+1. When the "Merge main into releases/v2" pull request is merged into the `releases/v2` branch, the "Update release branch" workflow will create a "Merge releases/v2 into releases/v1" pull request to merge the changes since the last release into the `releases/v1` release branch.
+   This ensures we keep both the `releases/v1` and `releases/v2` release branches up to date and fully supported.
 
   Review the checklist items in the pull request description.
   Once you've checked off all the items, approve the PR and automerge it.
-1. Once the mergeback has been merged to `main` and the "Merge v2 into v1" PR has been merged to `v1`, the release is complete.
+1. Once the mergeback has been merged to `main` and the "Merge releases/v2 into releases/v1" PR has been merged to `releases/v1`, the release is complete.
 
 ## Keeping the PR checks up to date (admin access required)
 
-Since the `codeql-action` runs most of its testing through individual Actions workflows, there are over two hundred jobs that need to pass in order for a PR to turn green. Managing these PR checks manually is time consuming and complex. Here is a semi-automated approach.
+Since the `codeql-action` runs most of its testing through individual Actions workflows, there are over two hundred jobs that need to pass in order for a PR to turn green. You can regenerate the checks automatically by running the [update-required-checks.sh](.github/workflows/script/update-required-checks.sh) script:
 
-To regenerate the PR jobs for the action:
+1. By default, this script retrieves the checks from the latest SHA on `main`, so make sure that your `main` branch is up to date.
+2. Run the script. If there's a reason to, you can pass in a different SHA as a CLI argument.
+3. After running, go to the [branch protection rules settings page](https://github.com/github/codeql-action/settings/branches) and validate that the rules for `main`, `v1`, and `v2` have been updated.
 
-1. From a terminal, run the following commands (replace `SHA` with the sha of the commit whose checks you want to use, typically this should be the latest from `main`):
-
-    ```sh
-    SHA= ####
-    CHECKS="$(gh api repos/github/codeql-action/commits/${SHA}/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or . == "Update dependencies" or . == "Update Supported Enterprise Server Versions" | not)]')"
-    echo "{\"contexts\": ${CHECKS}}" > checks.json
-    gh api -X "PATCH" repos/github/codeql-action/branches/main/protection/required_status_checks --input checks.json
-    gh api -X "PATCH" repos/github/codeql-action/branches/v2/protection/required_status_checks --input checks.json
-    gh api -X "PATCH" repos/github/codeql-action/branches/v1/protection/required_status_checks --input checks.json
-    ````
-
-2. Go to the [branch protection rules settings page](https://github.com/github/codeql-action/settings/branches) and validate that the rules have been updated.
-
 ## Resources
 
@@ -67,7 +67,7 @@ jobs:
       uses: github/codeql-action/autobuild@v2
 
     # ℹ️ Command-line programs to run using the OS shell.
-    # 📚 https://git.io/JvXDl
+    # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
 
     # ✏️ If the Autobuild fails above, remove it and uncomment the following
    # three lines and modify them (or add more) to build your code if your
3
lib/actions-util.js
generated
3
lib/actions-util.js
generated
@@ -584,8 +584,7 @@ async function sendStatusReport(statusReport) {
|
|||||||
const statusReportJSON = JSON.stringify(statusReport);
|
const statusReportJSON = JSON.stringify(statusReport);
|
||||||
core.debug(`Sending status report: ${statusReportJSON}`);
|
core.debug(`Sending status report: ${statusReportJSON}`);
|
||||||
// If in test mode we don't want to upload the results
|
// If in test mode we don't want to upload the results
|
||||||
const testMode = process.env["TEST_MODE"] === "true" || false;
|
if ((0, util_1.isInTestMode)()) {
|
||||||
if (testMode) {
|
|
||||||
core.debug("In test mode. Status reports are not uploaded.");
|
core.debug("In test mode. Status reports are not uploaded.");
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
10
lib/analyze-action-env.test.js
generated
10
lib/analyze-action-env.test.js
generated
@@ -38,14 +38,17 @@ const util = __importStar(require("./util"));
|
|||||||
// but the first test would fail.
|
// but the first test would fail.
|
||||||
(0, ava_1.default)("analyze action with RAM & threads from environment variables", async (t) => {
|
(0, ava_1.default)("analyze action with RAM & threads from environment variables", async (t) => {
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
process.env["GITHUB_SERVER_URL"] = "fake-server-url";
|
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
|
||||||
process.env["GITHUB_REPOSITORY"] = "fake/repository";
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
sinon
|
sinon
|
||||||
.stub(actionsUtil, "createStatusReportBase")
|
.stub(actionsUtil, "createStatusReportBase")
|
||||||
.resolves({});
|
.resolves({});
|
||||||
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
|
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
|
||||||
|
const gitHubVersion = {
|
||||||
|
type: util.GitHubVariant.DOTCOM,
|
||||||
|
};
|
||||||
sinon.stub(configUtils, "getConfig").resolves({
|
sinon.stub(configUtils, "getConfig").resolves({
|
||||||
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
|
gitHubVersion,
|
||||||
languages: [],
|
languages: [],
|
||||||
packs: [],
|
packs: [],
|
||||||
});
|
});
|
||||||
@@ -54,6 +57,7 @@ const util = __importStar(require("./util"));
|
|||||||
requiredInputStub.withArgs("upload-database").returns("false");
|
requiredInputStub.withArgs("upload-database").returns("false");
|
||||||
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
||||||
optionalInputStub.withArgs("cleanup-level").returns("none");
|
optionalInputStub.withArgs("cleanup-level").returns("none");
|
||||||
|
sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
|
||||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
|
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
|
||||||
// When there are no action inputs for RAM and threads, the action uses
|
// When there are no action inputs for RAM and threads, the action uses
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YAClD,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||||
10
lib/analyze-action-input.test.js
generated
10
lib/analyze-action-input.test.js
generated
@@ -38,14 +38,17 @@ const util = __importStar(require("./util"));
|
|||||||
// but the first test would fail.
|
// but the first test would fail.
|
||||||
(0, ava_1.default)("analyze action with RAM & threads from action inputs", async (t) => {
|
(0, ava_1.default)("analyze action with RAM & threads from action inputs", async (t) => {
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
process.env["GITHUB_SERVER_URL"] = "fake-server-url";
|
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
|
||||||
process.env["GITHUB_REPOSITORY"] = "fake/repository";
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
sinon
|
sinon
|
||||||
.stub(actionsUtil, "createStatusReportBase")
|
.stub(actionsUtil, "createStatusReportBase")
|
||||||
.resolves({});
|
.resolves({});
|
||||||
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
|
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
|
||||||
|
const gitHubVersion = {
|
||||||
|
type: util.GitHubVariant.DOTCOM,
|
||||||
|
};
|
||||||
sinon.stub(configUtils, "getConfig").resolves({
|
sinon.stub(configUtils, "getConfig").resolves({
|
||||||
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
|
gitHubVersion,
|
||||||
languages: [],
|
languages: [],
|
||||||
packs: [],
|
packs: [],
|
||||||
});
|
});
|
||||||
@@ -54,6 +57,7 @@ const util = __importStar(require("./util"));
|
|||||||
requiredInputStub.withArgs("upload-database").returns("false");
|
requiredInputStub.withArgs("upload-database").returns("false");
|
||||||
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
||||||
optionalInputStub.withArgs("cleanup-level").returns("none");
|
optionalInputStub.withArgs("cleanup-level").returns("none");
|
||||||
|
sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
|
||||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
|
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
|
||||||
process.env["CODEQL_THREADS"] = "1";
|
process.env["CODEQL_THREADS"] = "1";
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YAClD,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||||
7
lib/analyze-action.js
generated
7
lib/analyze-action.js
generated
@@ -57,6 +57,7 @@ async function run() {
|
|||||||
let runStats = undefined;
|
let runStats = undefined;
|
||||||
let config = undefined;
|
let config = undefined;
|
||||||
util.initializeEnvironment(util.Mode.actions, pkg.version);
|
util.initializeEnvironment(util.Mode.actions, pkg.version);
|
||||||
|
await util.checkActionVersion(pkg.version);
|
||||||
try {
|
try {
|
||||||
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt)))) {
|
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt)))) {
|
||||||
return;
|
return;
|
||||||
@@ -117,7 +118,11 @@ async function run() {
|
|||||||
}
|
}
|
||||||
// Possibly upload the database bundles for remote queries
|
// Possibly upload the database bundles for remote queries
|
||||||
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger);
|
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger);
|
||||||
if (uploadResult !== undefined &&
|
// We don't upload results in test mode, so don't wait for processing
|
||||||
|
if (util.isInTestMode()) {
|
||||||
|
core.debug("In test mode. Waiting for processing is disabled.");
|
||||||
|
}
|
||||||
|
else if (uploadResult !== undefined &&
|
||||||
actionsUtil.getRequiredInput("wait-for-processing") === "true") {
|
actionsUtil.getRequiredInput("wait-for-processing") === "true") {
|
||||||
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
|
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
|
||||||
}
|
}
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
24
lib/analyze.js
generated
24
lib/analyze.js
generated
@@ -159,7 +159,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (packsWithVersion.length > 0) {
|
if (packsWithVersion.length > 0) {
|
||||||
querySuitePaths.push(await runQueryGroup(language, "packs", createPackSuiteContents(packsWithVersion), undefined));
|
querySuitePaths.push(...(await runQueryPacks(language, "packs", packsWithVersion, undefined)));
|
||||||
ranCustom = true;
|
ranCustom = true;
|
||||||
}
|
}
|
||||||
if (ranCustom) {
|
if (ranCustom) {
|
||||||
@@ -217,21 +217,23 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
|
|||||||
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
|
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
|
||||||
return querySuitePath;
|
return querySuitePath;
|
||||||
}
|
}
|
||||||
|
async function runQueryPacks(language, type, packs, searchPath) {
|
||||||
|
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||||
|
// Run the queries individually instead of all at once to avoid command
|
||||||
|
// line length restrictions, particularly on windows.
|
||||||
|
for (const pack of packs) {
|
||||||
|
logger.debug(`Running query pack for ${language}-${type}: ${pack}`);
|
||||||
|
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||||
|
await codeql.databaseRunQueries(databasePath, searchPath, pack, memoryFlag, threadsFlag);
|
||||||
|
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
|
||||||
|
}
|
||||||
|
return packs;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
exports.runQueries = runQueries;
|
exports.runQueries = runQueries;
|
||||||
function createQuerySuiteContents(queries) {
|
function createQuerySuiteContents(queries) {
|
||||||
return queries.map((q) => `- query: ${q}`).join("\n");
|
return queries.map((q) => `- query: ${q}`).join("\n");
|
||||||
}
|
}
|
||||||
function createPackSuiteContents(packsWithVersion) {
|
|
||||||
return packsWithVersion.map(packWithVersionToQuerySuiteEntry).join("\n");
|
|
||||||
}
|
|
||||||
function packWithVersionToQuerySuiteEntry(pack) {
|
|
||||||
let text = `- qlpack: ${pack.packName}`;
|
|
||||||
if (pack.version) {
|
|
||||||
text += `\n version: ${pack.version}`;
|
|
||||||
}
|
|
||||||
return text;
|
|
||||||
}
|
|
||||||
async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger) {
|
async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger) {
|
||||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||||
if (await util.codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
|
if (await util.codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
37
lib/analyze.test.js
generated
37
lib/analyze.test.js
generated
@@ -26,7 +26,6 @@ const fs = __importStar(require("fs"));
|
|||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const ava_1 = __importDefault(require("ava"));
|
const ava_1 = __importDefault(require("ava"));
|
||||||
const yaml = __importStar(require("js-yaml"));
|
const yaml = __importStar(require("js-yaml"));
|
||||||
const semver_1 = require("semver");
|
|
||||||
const sinon = __importStar(require("sinon"));
|
const sinon = __importStar(require("sinon"));
|
||||||
const analyze_1 = require("./analyze");
|
const analyze_1 = require("./analyze");
|
||||||
const codeql_1 = require("./codeql");
|
const codeql_1 = require("./codeql");
|
||||||
@@ -53,18 +52,8 @@ const util = __importStar(require("./util"));
|
|||||||
const addSnippetsFlag = "";
|
const addSnippetsFlag = "";
|
||||||
const threadsFlag = "";
|
const threadsFlag = "";
|
||||||
const packs = {
|
const packs = {
|
||||||
[languages_1.Language.cpp]: [
|
[languages_1.Language.cpp]: ["a/b@1.0.0"],
|
||||||
{
|
[languages_1.Language.java]: ["c/d@2.0.0"],
|
||||||
packName: "a/b",
|
|
||||||
version: (0, semver_1.clean)("1.0.0"),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
[languages_1.Language.java]: [
|
|
||||||
{
|
|
||||||
packName: "c/d",
|
|
||||||
version: (0, semver_1.clean)("2.0.0"),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
};
|
||||||
for (const language of Object.values(languages_1.Language)) {
|
for (const language of Object.values(languages_1.Language)) {
|
||||||
(0, codeql_1.setCodeQL)({
|
(0, codeql_1.setCodeQL)({
|
||||||
@@ -209,32 +198,10 @@ const util = __importStar(require("./util"));
|
|||||||
query: "bar.ql",
|
query: "bar.ql",
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
const qlsPackContentCpp = [
|
|
||||||
{
|
|
||||||
qlpack: "a/b",
|
|
||||||
version: "1.0.0",
|
|
||||||
},
|
|
||||||
];
|
|
||||||
const qlsPackContentJava = [
|
|
||||||
{
|
|
||||||
qlpack: "c/d",
|
|
||||||
version: "2.0.0",
|
|
||||||
},
|
|
||||||
];
|
|
||||||
for (const lang of Object.values(languages_1.Language)) {
|
for (const lang of Object.values(languages_1.Language)) {
|
||||||
t.deepEqual(readContents(`${lang}-queries-builtin.qls`), qlsContent);
|
t.deepEqual(readContents(`${lang}-queries-builtin.qls`), qlsContent);
|
||||||
t.deepEqual(readContents(`${lang}-queries-custom-0.qls`), qlsContent);
|
t.deepEqual(readContents(`${lang}-queries-custom-0.qls`), qlsContent);
|
||||||
t.deepEqual(readContents(`${lang}-queries-custom-1.qls`), qlsContent2);
|
t.deepEqual(readContents(`${lang}-queries-custom-1.qls`), qlsContent2);
|
||||||
const packSuiteName = `${lang}-queries-packs.qls`;
|
|
||||||
if (lang === languages_1.Language.cpp) {
|
|
||||||
t.deepEqual(readContents(packSuiteName), qlsPackContentCpp);
|
|
||||||
}
|
|
||||||
else if (lang === languages_1.Language.java) {
|
|
||||||
t.deepEqual(readContents(packSuiteName), qlsPackContentJava);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
t.false(fs.existsSync(path.join(tmpDir, "codeql_databases", packSuiteName)));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
function readContents(name) {
|
function readContents(name) {
|
||||||
const x = fs.readFileSync(path.join(tmpDir, "codeql_databases", name), "utf8");
|
const x = fs.readFileSync(path.join(tmpDir, "codeql_databases", name), "utf8");
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
3
lib/autobuild-action.js
generated
3
lib/autobuild-action.js
generated
@@ -39,8 +39,9 @@ async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguag
|
|||||||
await (0, actions_util_1.sendStatusReport)(statusReport);
|
await (0, actions_util_1.sendStatusReport)(statusReport);
|
||||||
}
|
}
|
||||||
async function run() {
|
async function run() {
|
||||||
const logger = (0, logging_1.getActionsLogger)();
|
|
||||||
const startedAt = new Date();
|
const startedAt = new Date();
|
||||||
|
const logger = (0, logging_1.getActionsLogger)();
|
||||||
|
await (0, util_1.checkActionVersion)(pkg.version);
|
||||||
let language = undefined;
|
let language = undefined;
|
||||||
try {
|
try {
|
||||||
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("autobuild", "starting", startedAt)))) {
|
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("autobuild", "starting", startedAt)))) {
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAOwB;AACxB,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,iCAAqD;AAErD,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,IAAA,sCAA0B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE;gBACpB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;aACjC;YACD,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAOwB;AACxB,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,iCAAyE;AAEzE,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,IAAA,yBAAkB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACtC,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,IAAA,sCAA0B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE;gBACpB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;aACjC;YACD,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||||
20
lib/codeql.js
generated
20
lib/codeql.js
generated
@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
|
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||||
@@ -86,6 +86,12 @@ exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
|
|||||||
* versions above that.
|
* versions above that.
|
||||||
*/
|
*/
|
||||||
exports.CODEQL_VERSION_NEW_TRACING = "2.7.0";
|
exports.CODEQL_VERSION_NEW_TRACING = "2.7.0";
|
||||||
|
/**
|
||||||
|
* Versions 2.9.0+ of the CodeQL CLI run machine learning models from a temporary directory, which
|
||||||
|
* resolves an issue on Windows where TensorFlow models are not correctly loaded due to the path of
|
||||||
|
* some of their files being greater than MAX_PATH (260 characters).
|
||||||
|
*/
|
||||||
|
exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = "2.9.0";
|
||||||
function getCodeQLBundleName() {
|
function getCodeQLBundleName() {
|
||||||
let platform;
|
let platform;
|
||||||
if (process.platform === "win32") {
|
if (process.platform === "win32") {
|
||||||
@@ -248,7 +254,9 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant
|
|||||||
}
|
}
|
||||||
const parsedCodeQLURL = new URL(codeqlURL);
|
const parsedCodeQLURL = new URL(codeqlURL);
|
||||||
const parsedQueryString = query_string_1.default.parse(parsedCodeQLURL.search);
|
const parsedQueryString = query_string_1.default.parse(parsedCodeQLURL.search);
|
||||||
const headers = { accept: "application/octet-stream" };
|
const headers = {
|
||||||
|
accept: "application/octet-stream",
|
||||||
|
};
|
||||||
// We only want to provide an authorization header if we are downloading
|
// We only want to provide an authorization header if we are downloading
|
||||||
// from the same GitHub instance the Action is running on.
|
// from the same GitHub instance the Action is running on.
|
||||||
// This avoids leaking Enterprise tokens to dotcom.
|
// This avoids leaking Enterprise tokens to dotcom.
|
||||||
@@ -394,7 +402,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
|||||||
async getVersion() {
|
async getVersion() {
|
||||||
let result = util.getCachedCodeQlVersion();
|
let result = util.getCachedCodeQlVersion();
|
||||||
if (result === undefined) {
|
if (result === undefined) {
|
||||||
result = await runTool(cmd, ["version", "--format=terse"]);
|
result = (await runTool(cmd, ["version", "--format=terse"])).trim();
|
||||||
util.cacheCodeQlVersion(result);
|
util.cacheCodeQlVersion(result);
|
||||||
}
|
}
|
||||||
return result;
|
return result;
|
||||||
@@ -641,8 +649,9 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
|||||||
"pack",
|
"pack",
|
||||||
"download",
|
"download",
|
||||||
"--format=json",
|
"--format=json",
|
||||||
|
"--resolve-query-specs",
|
||||||
...getExtraOptionsFromEnv(["pack", "download"]),
|
...getExtraOptionsFromEnv(["pack", "download"]),
|
||||||
...packs.map(packWithVersionToString),
|
...packs,
|
||||||
];
|
];
|
||||||
const output = await runTool(cmd, codeqlArgs);
|
const output = await runTool(cmd, codeqlArgs);
|
||||||
try {
|
try {
|
||||||
@@ -698,9 +707,6 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
|||||||
}
|
}
|
||||||
return codeql;
|
return codeql;
|
||||||
}
|
}
|
||||||
function packWithVersionToString(pack) {
|
|
||||||
return pack.version ? `${pack.packName}@${pack.version}` : pack.packName;
|
|
||||||
}
|
|
||||||
/**
|
/**
|
||||||
* Gets the options for `path` of `options` as an array of extra option strings.
|
* Gets the options for `path` of `options` as an array of extra option strings.
|
||||||
*/
|
*/
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
84
lib/config-utils.js
generated
84
lib/config-utils.js
generated
@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.parsePacksFromConfig = exports.getDefaultConfig = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPacksRequireLanguage = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = void 0;
|
exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.validatePacksSpecification = exports.parsePacksFromConfig = exports.getDefaultConfig = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPacksRequireLanguage = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = void 0;
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const yaml = __importStar(require("js-yaml"));
|
const yaml = __importStar(require("js-yaml"));
|
||||||
@@ -131,11 +131,12 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suite
|
|||||||
// opted into the ML-powered queries beta, and a user hasn't already added the ML-powered query
|
// opted into the ML-powered queries beta, and a user hasn't already added the ML-powered query
|
||||||
// pack, then add the ML-powered query pack so that we run ML-powered queries.
|
// pack, then add the ML-powered query pack so that we run ML-powered queries.
|
||||||
if (
|
if (
|
||||||
// Disable ML-powered queries on Windows
|
// Only run ML-powered queries on Windows if we have a CLI that supports it.
|
||||||
process.platform !== "win32" &&
|
(process.platform !== "win32" ||
|
||||||
|
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS))) &&
|
||||||
languages.includes("javascript") &&
|
languages.includes("javascript") &&
|
||||||
(found === "security-extended" || found === "security-and-quality") &&
|
(found === "security-extended" || found === "security-and-quality") &&
|
||||||
!((_a = packs.javascript) === null || _a === void 0 ? void 0 : _a.some((pack) => pack.packName === util_1.ML_POWERED_JS_QUERIES_PACK_NAME)) &&
|
!((_a = packs.javascript) === null || _a === void 0 ? void 0 : _a.some(isMlPoweredJsQueriesPack)) &&
|
||||||
(await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled)) &&
|
(await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled)) &&
|
||||||
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES))) {
|
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES))) {
|
||||||
if (!packs.javascript) {
|
if (!packs.javascript) {
|
||||||
@@ -148,6 +149,11 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suite
|
|||||||
await runResolveQueries(codeQL, resultMap, suites, undefined);
|
await runResolveQueries(codeQL, resultMap, suites, undefined);
|
||||||
return injectedMlQueries;
|
return injectedMlQueries;
|
||||||
}
|
}
|
||||||
|
function isMlPoweredJsQueriesPack(pack) {
|
||||||
|
return (pack === util_1.ML_POWERED_JS_QUERIES_PACK_NAME ||
|
||||||
|
pack.startsWith(`${util_1.ML_POWERED_JS_QUERIES_PACK_NAME}@`) ||
|
||||||
|
pack.startsWith(`${util_1.ML_POWERED_JS_QUERIES_PACK_NAME}:`));
|
||||||
|
}
|
||||||
/**
|
/**
|
||||||
* Retrieve the set of queries at localQueryPath and add them to resultMap.
|
* Retrieve the set of queries at localQueryPath and add them to resultMap.
|
||||||
*/
|
*/
|
||||||
@@ -634,7 +640,7 @@ function parsePacksFromConfig(packsByLanguage, languages, configFile) {
|
|||||||
}
|
}
|
||||||
packs[lang] = [];
|
packs[lang] = [];
|
||||||
for (const packStr of packsArr) {
|
for (const packStr of packsArr) {
|
||||||
packs[lang].push(toPackWithVersion(packStr, configFile));
|
packs[lang].push(validatePacksSpecification(packStr, configFile));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return packs;
|
return packs;
|
||||||
@@ -659,32 +665,74 @@ function parsePacksFromInput(packsInput, languages) {
|
|||||||
}
|
}
|
||||||
return {
|
return {
|
||||||
[languages[0]]: packsInput.split(",").reduce((packs, pack) => {
|
[languages[0]]: packsInput.split(",").reduce((packs, pack) => {
|
||||||
packs.push(toPackWithVersion(pack, ""));
|
packs.push(validatePacksSpecification(pack, ""));
|
||||||
return packs;
|
return packs;
|
||||||
}, []),
|
}, []),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
function toPackWithVersion(packStr, configFile) {
|
/**
|
||||||
|
* Validates that this package specification is syntactically correct.
|
||||||
|
* It may not point to any real package, but after this function returns
|
||||||
|
* without throwing, we are guaranteed that the package specification
|
||||||
|
* is roughly correct.
|
||||||
|
*
|
||||||
|
* The CLI itself will do a more thorough validation of the package
|
||||||
|
* specification.
|
||||||
|
*
|
||||||
|
* A package specification looks like this:
|
||||||
|
*
|
||||||
|
* `scope/name@version:path`
|
||||||
|
*
|
||||||
|
* Version and path are optional.
|
||||||
|
*
|
||||||
|
* @param packStr the package specification to verify.
|
||||||
|
* @param configFile Config file to use for error reporting
|
||||||
|
*/
|
||||||
|
function validatePacksSpecification(packStr, configFile) {
|
||||||
if (typeof packStr !== "string") {
|
if (typeof packStr !== "string") {
|
||||||
throw new Error(getPacksStrInvalid(packStr, configFile));
|
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||||
}
|
}
|
||||||
const nameWithVersion = packStr.trim().split("@");
|
packStr = packStr.trim();
|
||||||
let version;
|
const atIndex = packStr.indexOf("@");
|
||||||
if (nameWithVersion.length > 2 ||
|
const colonIndex = packStr.indexOf(":", atIndex);
|
||||||
!PACK_IDENTIFIER_PATTERN.test(nameWithVersion[0])) {
|
const packStart = 0;
|
||||||
|
const versionStart = atIndex + 1 || undefined;
|
||||||
|
const pathStart = colonIndex + 1 || undefined;
|
||||||
|
const packEnd = Math.min(atIndex > 0 ? atIndex : Infinity, colonIndex > 0 ? colonIndex : Infinity, packStr.length);
|
||||||
|
const versionEnd = versionStart
|
||||||
|
? Math.min(colonIndex > 0 ? colonIndex : Infinity, packStr.length)
|
||||||
|
: undefined;
|
||||||
|
const pathEnd = pathStart ? packStr.length : undefined;
|
||||||
|
const packName = packStr.slice(packStart, packEnd).trim();
|
||||||
|
const version = versionStart
|
||||||
|
? packStr.slice(versionStart, versionEnd).trim()
|
||||||
|
: undefined;
|
||||||
|
const packPath = pathStart
|
||||||
|
? packStr.slice(pathStart, pathEnd).trim()
|
||||||
|
: undefined;
|
||||||
|
if (!PACK_IDENTIFIER_PATTERN.test(packName)) {
|
||||||
throw new Error(getPacksStrInvalid(packStr, configFile));
|
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||||
}
|
}
|
||||||
else if (nameWithVersion.length === 2) {
|
if (version) {
|
||||||
version = semver.clean(nameWithVersion[1]) || undefined;
|
try {
|
||||||
if (!version) {
|
new semver.Range(version);
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
// The range string is invalid. OK to ignore the caught error
|
||||||
throw new Error(getPacksStrInvalid(packStr, configFile));
|
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return {
|
if (packPath &&
|
||||||
packName: nameWithVersion[0].trim(),
|
(path.isAbsolute(packPath) || path.normalize(packPath) !== packPath)) {
|
||||||
version,
|
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||||
};
|
}
|
||||||
|
if (!packPath && pathStart) {
|
||||||
|
// 0 length path
|
||||||
|
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||||
|
}
|
||||||
|
return (packName + (version ? `@${version}` : "") + (packPath ? `:${packPath}` : ""));
|
||||||
}
|
}
|
||||||
|
exports.validatePacksSpecification = validatePacksSpecification;
|
||||||
// exported for testing
|
// exported for testing
|
||||||
function parsePacks(rawPacksFromConfig, rawPacksInput, languages, configFile) {
|
function parsePacks(rawPacksFromConfig, rawPacksInput, languages, configFile) {
|
||||||
const packsFromInput = parsePacksFromInput(rawPacksInput, languages);
|
const packsFromInput = parsePacksFromInput(rawPacksInput, languages);
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
134
lib/config-utils.test.js
generated
134
lib/config-utils.test.js
generated
@@ -26,7 +26,6 @@ const fs = __importStar(require("fs"));
|
|||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const github = __importStar(require("@actions/github"));
|
const github = __importStar(require("@actions/github"));
|
||||||
const ava_1 = __importDefault(require("ava"));
|
const ava_1 = __importDefault(require("ava"));
|
||||||
const semver_1 = require("semver");
|
|
||||||
const sinon = __importStar(require("sinon"));
|
const sinon = __importStar(require("sinon"));
|
||||||
const api = __importStar(require("./api-client"));
|
const api = __importStar(require("./api-client"));
|
||||||
const codeql_1 = require("./codeql");
|
const codeql_1 = require("./codeql");
|
||||||
@@ -601,12 +600,7 @@ function queriesToResolvedQueryForm(queries) {
|
|||||||
const languages = "javascript";
|
const languages = "javascript";
|
||||||
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||||
t.deepEqual(packs, {
|
t.deepEqual(packs, {
|
||||||
[languages_1.Language.javascript]: [
|
[languages_1.Language.javascript]: ["a/b@1.2.3"],
|
||||||
{
|
|
||||||
packName: "a/b",
|
|
||||||
version: (0, semver_1.clean)("1.2.3"),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -640,18 +634,8 @@ function queriesToResolvedQueryForm(queries) {
|
|||||||
const languages = "javascript,python,cpp";
|
const languages = "javascript,python,cpp";
|
||||||
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||||
t.deepEqual(packs, {
|
t.deepEqual(packs, {
|
||||||
[languages_1.Language.javascript]: [
|
[languages_1.Language.javascript]: ["a/b@1.2.3"],
|
||||||
{
|
[languages_1.Language.python]: ["c/d@1.2.3"],
|
||||||
packName: "a/b",
|
|
||||||
version: (0, semver_1.clean)("1.2.3"),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
[languages_1.Language.python]: [
|
|
||||||
{
|
|
||||||
packName: "c/d",
|
|
||||||
version: (0, semver_1.clean)("1.2.3"),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
});
|
||||||
t.deepEqual(queries, {
|
t.deepEqual(queries, {
|
||||||
cpp: {
|
cpp: {
|
||||||
@@ -786,28 +770,47 @@ const invalidPackNameMacro = ava_1.default.macro({
|
|||||||
});
|
});
|
||||||
(0, ava_1.default)("no packs", parsePacksMacro, {}, [], {});
|
(0, ava_1.default)("no packs", parsePacksMacro, {}, [], {});
|
||||||
(0, ava_1.default)("two packs", parsePacksMacro, ["a/b", "c/d@1.2.3"], [languages_1.Language.cpp], {
|
(0, ava_1.default)("two packs", parsePacksMacro, ["a/b", "c/d@1.2.3"], [languages_1.Language.cpp], {
|
||||||
[languages_1.Language.cpp]: [
|
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||||
{ packName: "a/b", version: undefined },
|
|
||||||
{ packName: "c/d", version: (0, semver_1.clean)("1.2.3") },
|
|
||||||
],
|
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("two packs with spaces", parsePacksMacro, [" a/b ", " c/d@1.2.3 "], [languages_1.Language.cpp], {
|
(0, ava_1.default)("two packs with spaces", parsePacksMacro, [" a/b ", " c/d@1.2.3 "], [languages_1.Language.cpp], {
|
||||||
[languages_1.Language.cpp]: [
|
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||||
{ packName: "a/b", version: undefined },
|
|
||||||
{ packName: "c/d", version: (0, semver_1.clean)("1.2.3") },
|
|
||||||
],
|
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("two packs with language", parsePacksMacro, {
|
(0, ava_1.default)("two packs with language", parsePacksMacro, {
|
||||||
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||||
[languages_1.Language.java]: ["d/e", "f/g@1.2.3"],
|
[languages_1.Language.java]: ["d/e", "f/g@1.2.3"],
|
||||||
}, [languages_1.Language.cpp, languages_1.Language.java, languages_1.Language.csharp], {
|
}, [languages_1.Language.cpp, languages_1.Language.java, languages_1.Language.csharp], {
|
||||||
|
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||||
|
[languages_1.Language.java]: ["d/e", "f/g@1.2.3"],
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("packs with other valid names", parsePacksMacro, [
|
||||||
|
// ranges are ok
|
||||||
|
"c/d@1.0",
|
||||||
|
"c/d@~1.0.0",
|
||||||
|
"c/d@~1.0.0:a/b",
|
||||||
|
"c/d@~1.0.0+abc:a/b",
|
||||||
|
"c/d@~1.0.0-abc:a/b",
|
||||||
|
"c/d:a/b",
|
||||||
|
// whitespace is removed
|
||||||
|
" c/d @ ~1.0.0 : b.qls ",
|
||||||
|
// and it is retained within a path
|
||||||
|
" c/d @ ~1.0.0 : b/a path with/spaces.qls ",
|
||||||
|
// this is valid. the path is '@'. It will probably fail when passed to the CLI
|
||||||
|
"c/d@1.2.3:@",
|
||||||
|
// this is valid, too. It will fail if it doesn't match a path
|
||||||
|
// (globbing is not done)
|
||||||
|
"c/d@1.2.3:+*)_(",
|
||||||
|
], [languages_1.Language.cpp], {
|
||||||
[languages_1.Language.cpp]: [
|
[languages_1.Language.cpp]: [
|
||||||
{ packName: "a/b", version: undefined },
|
"c/d@1.0",
|
||||||
{ packName: "c/d", version: (0, semver_1.clean)("1.2.3") },
|
"c/d@~1.0.0",
|
||||||
],
|
"c/d@~1.0.0:a/b",
|
||||||
[languages_1.Language.java]: [
|
"c/d@~1.0.0+abc:a/b",
|
||||||
{ packName: "d/e", version: undefined },
|
"c/d@~1.0.0-abc:a/b",
|
||||||
{ packName: "f/g", version: (0, semver_1.clean)("1.2.3") },
|
"c/d:a/b",
|
||||||
|
"c/d@~1.0.0:b.qls",
|
||||||
|
"c/d@~1.0.0:b/a path with/spaces.qls",
|
||||||
|
"c/d@1.2.3:@",
|
||||||
|
"c/d@1.2.3:+*)_(",
|
||||||
],
|
],
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("no language", parsePacksErrorMacro, ["a/b@1.2.3"], [languages_1.Language.java, languages_1.Language.python], /The configuration file "\/a\/b" is invalid: property "packs" must split packages by language/);
|
(0, ava_1.default)("no language", parsePacksErrorMacro, ["a/b@1.2.3"], [languages_1.Language.java, languages_1.Language.python], /The configuration file "\/a\/b" is invalid: property "packs" must split packages by language/);
|
||||||
@@ -817,7 +820,14 @@ const invalidPackNameMacro = ava_1.default.macro({
|
|||||||
(0, ava_1.default)(invalidPackNameMacro, "c-/d");
|
(0, ava_1.default)(invalidPackNameMacro, "c-/d");
|
||||||
(0, ava_1.default)(invalidPackNameMacro, "-c/d");
|
(0, ava_1.default)(invalidPackNameMacro, "-c/d");
|
||||||
(0, ava_1.default)(invalidPackNameMacro, "c/d_d");
|
(0, ava_1.default)(invalidPackNameMacro, "c/d_d");
|
||||||
(0, ava_1.default)(invalidPackNameMacro, "c/d@x");
|
(0, ava_1.default)(invalidPackNameMacro, "c/d@@");
|
||||||
|
(0, ava_1.default)(invalidPackNameMacro, "c/d@1.0.0:");
|
||||||
|
(0, ava_1.default)(invalidPackNameMacro, "c/d:");
|
||||||
|
(0, ava_1.default)(invalidPackNameMacro, "c/d:/a");
|
||||||
|
(0, ava_1.default)(invalidPackNameMacro, "@1.0.0:a");
|
||||||
|
(0, ava_1.default)(invalidPackNameMacro, "c/d@../a");
|
||||||
|
(0, ava_1.default)(invalidPackNameMacro, "c/d@b/../a");
|
||||||
|
(0, ava_1.default)(invalidPackNameMacro, "c/d:z@1");
|
||||||
/**
|
/**
|
||||||
* Test macro for testing the packs block and the packs input
|
* Test macro for testing the packs block and the packs input
|
||||||
*/
|
*/
|
||||||
@@ -834,39 +844,22 @@ function parseInputAndConfigErrorMacro(t, packsFromConfig, packsFromInput, langu
|
|||||||
}
|
}
|
||||||
parseInputAndConfigErrorMacro.title = (providedTitle) => `Parse Packs input and config Error: ${providedTitle}`;
|
parseInputAndConfigErrorMacro.title = (providedTitle) => `Parse Packs input and config Error: ${providedTitle}`;
|
||||||
(0, ava_1.default)("input only", parseInputAndConfigMacro, {}, " c/d ", [languages_1.Language.cpp], {
|
(0, ava_1.default)("input only", parseInputAndConfigMacro, {}, " c/d ", [languages_1.Language.cpp], {
|
||||||
[languages_1.Language.cpp]: [{ packName: "c/d", version: undefined }],
|
[languages_1.Language.cpp]: ["c/d"],
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("input only with multiple", parseInputAndConfigMacro, {}, "a/b , c/d@1.2.3", [languages_1.Language.cpp], {
|
(0, ava_1.default)("input only with multiple", parseInputAndConfigMacro, {}, "a/b , c/d@1.2.3", [languages_1.Language.cpp], {
|
||||||
[languages_1.Language.cpp]: [
|
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||||
{ packName: "a/b", version: undefined },
|
|
||||||
{ packName: "c/d", version: "1.2.3" },
|
|
||||||
],
|
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("input only with +", parseInputAndConfigMacro, {}, " + a/b , c/d@1.2.3 ", [languages_1.Language.cpp], {
|
(0, ava_1.default)("input only with +", parseInputAndConfigMacro, {}, " + a/b , c/d@1.2.3 ", [languages_1.Language.cpp], {
|
||||||
[languages_1.Language.cpp]: [
|
[languages_1.Language.cpp]: ["a/b", "c/d@1.2.3"],
|
||||||
{ packName: "a/b", version: undefined },
|
|
||||||
{ packName: "c/d", version: "1.2.3" },
|
|
||||||
],
|
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("config only", parseInputAndConfigMacro, ["a/b", "c/d"], " ", [languages_1.Language.cpp], {
|
(0, ava_1.default)("config only", parseInputAndConfigMacro, ["a/b", "c/d"], " ", [languages_1.Language.cpp], {
|
||||||
[languages_1.Language.cpp]: [
|
[languages_1.Language.cpp]: ["a/b", "c/d"],
|
||||||
{ packName: "a/b", version: undefined },
|
|
||||||
{ packName: "c/d", version: undefined },
|
|
||||||
],
|
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("input overrides", parseInputAndConfigMacro, ["a/b", "c/d"], " e/f, g/h@1.2.3 ", [languages_1.Language.cpp], {
|
(0, ava_1.default)("input overrides", parseInputAndConfigMacro, ["a/b", "c/d"], " e/f, g/h@1.2.3 ", [languages_1.Language.cpp], {
|
||||||
[languages_1.Language.cpp]: [
|
[languages_1.Language.cpp]: ["e/f", "g/h@1.2.3"],
|
||||||
{ packName: "e/f", version: undefined },
|
|
||||||
{ packName: "g/h", version: "1.2.3" },
|
|
||||||
],
|
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("input and config", parseInputAndConfigMacro, ["a/b", "c/d"], " +e/f, g/h@1.2.3 ", [languages_1.Language.cpp], {
|
(0, ava_1.default)("input and config", parseInputAndConfigMacro, ["a/b", "c/d"], " +e/f, g/h@1.2.3 ", [languages_1.Language.cpp], {
|
||||||
[languages_1.Language.cpp]: [
|
[languages_1.Language.cpp]: ["e/f", "g/h@1.2.3", "a/b", "c/d"],
|
||||||
{ packName: "e/f", version: undefined },
|
|
||||||
{ packName: "g/h", version: "1.2.3" },
|
|
||||||
{ packName: "a/b", version: undefined },
|
|
||||||
{ packName: "c/d", version: undefined },
|
|
||||||
],
|
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("input with no language", parseInputAndConfigErrorMacro, {}, "c/d", [], /No languages specified/);
|
(0, ava_1.default)("input with no language", parseInputAndConfigErrorMacro, {}, "c/d", [], /No languages specified/);
|
||||||
(0, ava_1.default)("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], /multi-language analysis/);
|
(0, ava_1.default)("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], /multi-language analysis/);
|
||||||
@@ -895,10 +888,7 @@ const mlPoweredQueriesMacro = ava_1.default.macro({
     if (expectedVersionString !== undefined) {
         t.deepEqual(packs, {
             [languages_1.Language.javascript]: [
-                {
-                    packName: "codeql/javascript-experimental-atm-queries",
-                    version: expectedVersionString,
-                },
+                `codeql/javascript-experimental-atm-queries@${expectedVersionString}`,
             ],
         });
     }
@@ -916,15 +906,23 @@ const mlPoweredQueriesMacro = ava_1.default.macro({
 (0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, undefined, "security-extended", undefined);
 // Test that ML-powered queries aren't run when the feature flag is off.
 (0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, undefined, "security-extended", undefined);
+// Test that the ~0.1.0 version of ML-powered queries is run on v2.8.3 of the CLI.
+(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.3", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.1.0");
 // Test that ML-powered queries aren't run when the user hasn't specified that we should run the
 // `security-extended` or `security-and-quality` query suite.
 (0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, undefined, undefined);
-// Test that ML-powered queries are run on non-Windows platforms running `security-extended`.
-(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.1.0");
-// Test that ML-powered queries are run on non-Windows platforms running `security-and-quality`.
-(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, "security-and-quality", process.platform === "win32" ? undefined : "~0.1.0");
+// Test that ML-powered queries are run on non-Windows platforms running `security-extended` on
+// versions of the CodeQL CLI prior to 2.9.0.
+(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.5", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.2.0");
+// Test that ML-powered queries are run on non-Windows platforms running `security-and-quality` on
+// versions of the CodeQL CLI prior to 2.9.0.
+(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.5", true, undefined, "security-and-quality", process.platform === "win32" ? undefined : "~0.2.0");
+// Test that ML-powered queries are run on all platforms running `security-extended` on CodeQL CLI
+// 2.9.0+.
+(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, undefined, "security-extended", "~0.2.0");
+// Test that ML-powered queries are run on all platforms running `security-and-quality` on CodeQL
+// CLI 2.9.0+.
+(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, undefined, "security-and-quality", "~0.2.0");
 // Test that we don't inject an ML-powered query pack if the user has already specified one.
-(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "codeql/javascript-experimental-atm-queries@0.0.1", "security-and-quality", process.platform === "win32" ? undefined : "0.0.1");
-// Test that the ~0.2.0 version of ML-powered queries is run on v2.8.4 of the CLI.
-(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.4", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.2.0");
+(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, "codeql/javascript-experimental-atm-queries@0.0.1", "security-and-quality", "0.0.1");
 //# sourceMappingURL=config-utils.test.js.map
File diff suppressed because one or more lines are too long
@@ -1,3 +1,3 @@
 {
-    "bundleVersion": "codeql-bundle-20220401"
+    "bundleVersion": "codeql-bundle-20220512"
 }
lib/init-action.js (1 changed line, generated)
@@ -71,6 +71,7 @@ async function run() {
     const startedAt = new Date();
     const logger = (0, logging_1.getActionsLogger)();
     (0, util_1.initializeEnvironment)(util_1.Mode.actions, pkg.version);
+    await (0, util_1.checkActionVersion)(pkg.version);
     let config;
     let codeql;
     let toolsVersion;
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
lib/upload-lib.js (29 changed lines, generated)
@@ -93,8 +93,7 @@ function getAutomationID(category, analysis_key, environment) {
 async function uploadPayload(payload, repositoryNwo, apiDetails, logger) {
     logger.info("Uploading results");
     // If in test mode we don't want to upload the results
-    const testMode = process.env["TEST_MODE"] === "true" || false;
-    if (testMode) {
+    if (util.isInTestMode()) {
         const payloadSaveFile = path.join(actionsUtil.getTemporaryDirectory(), "payload.json");
         logger.info(`In test mode. Results are not uploaded. Saving to ${payloadSaveFile}`);
         logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
@@ -311,26 +310,28 @@ async function waitForProcessing(repositoryNwo, sarifID, apiDetails, logger) {
             logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
             break;
         }
+        let response = undefined;
         try {
-            const response = await client.request("GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id", {
+            response = await client.request("GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id", {
                 owner: repositoryNwo.owner,
                 repo: repositoryNwo.repo,
                 sarif_id: sarifID,
             });
-            const status = response.data.processing_status;
-            logger.info(`Analysis upload status is ${status}.`);
-            if (status === "complete") {
-                break;
-            }
-            else if (status === "pending") {
-                logger.debug("Analysis processing is still pending...");
-            }
-            else if (status === "failed") {
-                throw new Error(`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`);
-            }
         }
         catch (e) {
             logger.warning(`An error occurred checking the status of the delivery. ${e} It should still be processed in the background, but errors that occur during processing may not be reported.`);
+            break;
+        }
+        const status = response.data.processing_status;
+        logger.info(`Analysis upload status is ${status}.`);
+        if (status === "complete") {
+            break;
+        }
+        else if (status === "pending") {
+            logger.debug("Analysis processing is still pending...");
+        }
+        else if (status === "failed") {
+            throw new Error(`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`);
         }
         await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
     }
File diff suppressed because one or more lines are too long
lib/upload-sarif-action.js (9 changed lines, generated)
@@ -37,8 +37,9 @@ async function sendSuccessStatusReport(startedAt, uploadStats) {
     await actionsUtil.sendStatusReport(statusReport);
 }
 async function run() {
-    (0, util_1.initializeEnvironment)(util_1.Mode.actions, pkg.version);
     const startedAt = new Date();
+    (0, util_1.initializeEnvironment)(util_1.Mode.actions, pkg.version);
+    await (0, util_1.checkActionVersion)(pkg.version);
     if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("upload-sarif", "starting", startedAt)))) {
         return;
     }
@@ -50,7 +51,11 @@ async function run() {
     const gitHubVersion = await (0, api_client_1.getGitHubVersionActionsOnly)();
     const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)());
     core.setOutput("sarif-id", uploadResult.sarifID);
-    if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
+    // We don't upload results in test mode, so don't wait for processing
+    if ((0, util_1.isInTestMode)()) {
+        core.debug("In test mode. Waiting for processing is disabled.");
+    }
+    else if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
         await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
     }
     await sendSuccessStatusReport(startedAt, uploadResult.statusReport);
@@ -1 +1 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAA2D;AAC3D,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAA0E;AAE1E,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,wCAA2B,GAAE,CAAC;QAE1D,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QACjD,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YAClE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,EACnC,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAA2D;AAC3D,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAMgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,IAAA,yBAAkB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACtC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,wCAA2B,GAAE,CAAC;QAE1D,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QAEjD,qEAAqE;QACrE,IAAI,IAAA,mBAAY,GAAE,EAAE;YAClB,IAAI,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;SACjE;aAAM,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YACzE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,EACnC,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
lib/util.js (50 changed lines, generated)
@@ -22,13 +22,14 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isGitHubGhesVersionBelow = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
+exports.isInTestMode = exports.checkActionVersion = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isGitHubGhesVersionBelow = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
 const fs = __importStar(require("fs"));
 const os = __importStar(require("os"));
 const path = __importStar(require("path"));
 const core = __importStar(require("@actions/core"));
 const del_1 = __importDefault(require("del"));
 const semver = __importStar(require("semver"));
+const api = __importStar(require("./api-client"));
 const api_client_1 = require("./api-client");
 const apiCompatibility = __importStar(require("./api-compatibility.json"));
 const codeql_1 = require("./codeql");
@@ -552,9 +553,9 @@ exports.ML_POWERED_JS_QUERIES_PACK_NAME = "codeql/javascript-experimental-atm-qu
  */
 async function getMlPoweredJsQueriesPack(codeQL) {
     if (await codeQlVersionAbove(codeQL, "2.8.4")) {
-        return { packName: exports.ML_POWERED_JS_QUERIES_PACK_NAME, version: "~0.2.0" };
+        return `${exports.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.2.0`;
     }
-    return { packName: exports.ML_POWERED_JS_QUERIES_PACK_NAME, version: "~0.1.0" };
+    return `${exports.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.1.0`;
 }
 exports.getMlPoweredJsQueriesPack = getMlPoweredJsQueriesPack;
 /**
@@ -579,7 +580,10 @@ exports.getMlPoweredJsQueriesPack = getMlPoweredJsQueriesPack;
  * explanation as to why this is.
  */
 function getMlPoweredJsQueriesStatus(config) {
-    const mlPoweredJsQueryPacks = (config.packs.javascript || []).filter((pack) => pack.packName === exports.ML_POWERED_JS_QUERIES_PACK_NAME);
+    const mlPoweredJsQueryPacks = (config.packs.javascript || [])
+        .map((pack) => pack.split("@"))
+        .filter((packNameVersion) => packNameVersion[0] === "codeql/javascript-experimental-atm-queries" &&
+        packNameVersion.length <= 2);
     switch (mlPoweredJsQueryPacks.length) {
         case 1:
             // We should always specify an explicit version string in `getMlPoweredJsQueriesPack`,
@@ -587,7 +591,7 @@ function getMlPoweredJsQueriesStatus(config) {
             // with each version of the CodeQL Action. Therefore in practice we should only hit the
             // `latest` case here when customers have explicitly added the ML-powered query pack to their
             // CodeQL config.
-            return mlPoweredJsQueryPacks[0].version || "latest";
+            return mlPoweredJsQueryPacks[0][1] || "latest";
         case 0:
             return "false";
         default:
@@ -595,4 +599,40 @@ function getMlPoweredJsQueriesStatus(config) {
     }
 }
 exports.getMlPoweredJsQueriesStatus = getMlPoweredJsQueriesStatus;
+/**
+ * Prompt the customer to upgrade to CodeQL Action v2, if appropriate.
+ *
+ * Check whether a customer is running v1. If they are, and we can determine that the GitHub
+ * instance supports v2, then log a warning about v1's upcoming deprecation prompting the customer
+ * to upgrade to v2.
+ */
+async function checkActionVersion(version) {
+    var _a;
+    if (!semver.satisfies(version, ">=2")) {
+        const githubVersion = await api.getGitHubVersionActionsOnly();
+        // Only log a warning for versions of GHES that are compatible with CodeQL Action version 2.
+        //
+        // GHES 3.4 shipped without the v2 tag, but it also shipped without this warning message code.
+        // Therefore users who are seeing this warning message code have pulled in a new version of the
+        // Action, and with it the v2 tag.
+        if (githubVersion.type === GitHubVariant.DOTCOM ||
+            githubVersion.type === GitHubVariant.GHAE ||
+            (githubVersion.type === GitHubVariant.GHES &&
+                semver.satisfies((_a = semver.coerce(githubVersion.version)) !== null && _a !== void 0 ? _a : "0.0.0", ">=3.4"))) {
+            core.warning("CodeQL Action v1 will be deprecated on December 7th, 2022. Please upgrade to v2. For " +
+                "more information, see " +
+                "https://github.blog/changelog/2022-04-27-code-scanning-deprecation-of-codeql-action-v1/");
+        }
+    }
+}
+exports.checkActionVersion = checkActionVersion;
+/*
+ * Returns whether we are in test mode.
+ *
+ * In test mode, we don't upload SARIF results or status reports to the GitHub API.
+ */
+function isInTestMode() {
+    return process.env["TEST_MODE"] === "true" || false;
+}
+exports.isInTestMode = isInTestMode;
 //# sourceMappingURL=util.js.map
File diff suppressed because one or more lines are too long
lib/util.test.js (73 changed lines, generated)
@@ -25,6 +25,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 const fs = __importStar(require("fs"));
 const os = __importStar(require("os"));
 const stream = __importStar(require("stream"));
+const core = __importStar(require("@actions/core"));
 const github = __importStar(require("@actions/github"));
 const ava_1 = __importDefault(require("ava"));
 const sinon = __importStar(require("sinon"));
@@ -208,40 +209,28 @@ const ML_POWERED_JS_STATUS_TESTS = [
     // If no packs are loaded, status is false.
     [[], "false"],
     // If another pack is loaded but not the ML-powered query pack, status is false.
-    [[{ packName: "someOtherPack" }], "false"],
+    [["someOtherPack"], "false"],
     // If the ML-powered query pack is loaded with a specific version, status is that version.
-    [
-        [{ packName: util.ML_POWERED_JS_QUERIES_PACK_NAME, version: "~0.1.0" }],
-        "~0.1.0",
-    ],
+    [[`${util.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.1.0`], "~0.1.0"],
     // If the ML-powered query pack is loaded with a specific version and another pack is loaded, the
     // status is the version of the ML-powered query pack.
     [
-        [
-            { packName: "someOtherPack" },
-            { packName: util.ML_POWERED_JS_QUERIES_PACK_NAME, version: "~0.1.0" },
-        ],
+        ["someOtherPack", `${util.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.1.0`],
         "~0.1.0",
     ],
     // If the ML-powered query pack is loaded without a version, the status is "latest".
-    [[{ packName: util.ML_POWERED_JS_QUERIES_PACK_NAME }], "latest"],
+    [[util.ML_POWERED_JS_QUERIES_PACK_NAME], "latest"],
     // If the ML-powered query pack is loaded with two different versions, the status is "other".
     [
         [
-            { packName: util.ML_POWERED_JS_QUERIES_PACK_NAME, version: "0.0.1" },
-            { packName: util.ML_POWERED_JS_QUERIES_PACK_NAME, version: "0.0.2" },
+            `${util.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.0.1`,
+            `${util.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.0.2`,
         ],
         "other",
     ],
     // If the ML-powered query pack is loaded with no specific version, and another pack is loaded,
     // the status is "latest".
-    [
-        [
-            { packName: "someOtherPack" },
-            { packName: util.ML_POWERED_JS_QUERIES_PACK_NAME },
-        ],
-        "latest",
-    ],
+    [["someOtherPack", util.ML_POWERED_JS_QUERIES_PACK_NAME], "latest"],
 ];
 for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
     const packDescriptions = `[${packs
@@ -281,4 +270,50 @@ for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
     t.falsy(util.isGitHubGhesVersionBelow({ type: util.GitHubVariant.GHES, version: "3.2.0" }, "3.2.0"));
     t.true(util.isGitHubGhesVersionBelow({ type: util.GitHubVariant.GHES, version: "3.1.2" }, "3.2.0"));
 });
+function formatGitHubVersion(version) {
+    switch (version.type) {
+        case util.GitHubVariant.DOTCOM:
+            return "dotcom";
+        case util.GitHubVariant.GHAE:
+            return "GHAE";
+        case util.GitHubVariant.GHES:
+            return `GHES ${version.version}`;
+        default:
+            util.assertNever(version);
+    }
+}
+const CHECK_ACTION_VERSION_TESTS = [
+    ["1.2.1", { type: util.GitHubVariant.DOTCOM }, true],
+    ["1.2.1", { type: util.GitHubVariant.GHAE }, true],
+    ["1.2.1", { type: util.GitHubVariant.GHES, version: "3.3" }, false],
+    ["1.2.1", { type: util.GitHubVariant.GHES, version: "3.4" }, true],
+    ["1.2.1", { type: util.GitHubVariant.GHES, version: "3.5" }, true],
+    ["2.2.1", { type: util.GitHubVariant.DOTCOM }, false],
+    ["2.2.1", { type: util.GitHubVariant.GHAE }, false],
+    ["2.2.1", { type: util.GitHubVariant.GHES, version: "3.3" }, false],
+    ["2.2.1", { type: util.GitHubVariant.GHES, version: "3.4" }, false],
+    ["2.2.1", { type: util.GitHubVariant.GHES, version: "3.5" }, false],
+];
+for (const [version, githubVersion, shouldReportWarning,] of CHECK_ACTION_VERSION_TESTS) {
+    const reportWarningDescription = shouldReportWarning
+        ? "reports warning"
+        : "doesn't report warning";
+    const versionsDescription = `CodeQL Action version ${version} and GitHub version ${formatGitHubVersion(githubVersion)}`;
+    (0, ava_1.default)(`checkActionVersion ${reportWarningDescription} for ${versionsDescription}`, async (t) => {
+        const warningSpy = sinon.spy(core, "warning");
+        const versionStub = sinon
+            .stub(api, "getGitHubVersionActionsOnly")
+            .resolves(githubVersion);
+        const isActionsStub = sinon.stub(util, "isActions").returns(true);
+        await util.checkActionVersion(version);
+        if (shouldReportWarning) {
+            t.true(warningSpy.calledOnceWithExactly(sinon.match("CodeQL Action v1 will be deprecated")));
+        }
+        else {
+            t.false(warningSpy.called);
+        }
+        versionStub.restore();
+        isActionsStub.restore();
+    });
+}
 //# sourceMappingURL=util.test.js.map
File diff suppressed because one or more lines are too long
node_modules/.package-lock.json (38 changed lines, generated, vendored)
@@ -1,6 +1,6 @@
 {
   "name": "codeql",
-  "version": "2.1.9",
+  "version": "2.1.11",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
@@ -15,6 +15,14 @@
         "tmp-promise": "^3.0.2"
       }
     },
+    "node_modules/@actions/artifact/node_modules/@actions/http-client": {
+      "version": "1.0.11",
+      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.11.tgz",
+      "integrity": "sha512-VRYHGQV1rqnROJqdMvGUbY/Kn8vriQe/F9HR2AlYHzmKuM/p3kjNuXhmdBfcVgsvRWTz5C5XW5xvndZrVBuAYg==",
+      "dependencies": {
+        "tunnel": "0.0.6"
+      }
+    },
     "node_modules/@actions/core": {
       "version": "1.4.0",
       "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.4.0.tgz",
@@ -38,7 +46,7 @@
         "@octokit/plugin-rest-endpoint-methods": "^4.0.0"
       }
     },
-    "node_modules/@actions/http-client": {
+    "node_modules/@actions/github/node_modules/@actions/http-client": {
       "version": "1.0.11",
       "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.11.tgz",
       "integrity": "sha512-VRYHGQV1rqnROJqdMvGUbY/Kn8vriQe/F9HR2AlYHzmKuM/p3kjNuXhmdBfcVgsvRWTz5C5XW5xvndZrVBuAYg==",
@@ -46,12 +54,10 @@
         "tunnel": "0.0.6"
       }
     },
-    "node_modules/@actions/http-client/node_modules/tunnel": {
-      "version": "0.0.6",
-      "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==",
-      "engines": {
-        "node": ">=0.6.11 <=0.7.0 || >=0.7.3"
-      }
+    "node_modules/@actions/http-client": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.0.tgz",
+      "integrity": "sha512-fm1+OPPey5ypgStT9K8zbBhICj4J4UV/TJIHDhuWlkb8KyJaAtjcZK184dTqul0dV0nPKX97FNtDXX20BTLXSA=="
     },
     "node_modules/@actions/io": {
       "version": "1.1.1",
@@ -59,13 +65,13 @@
       "integrity": "sha512-Qi4JoKXjmE0O67wAOH6y0n26QXhMKMFo7GD/4IXNVcrtLjUlGjGuVys6pQgwF3ArfGTQu0XpqaNr0YhED2RaRA=="
     },
     "node_modules/@actions/tool-cache": {
-      "version": "1.7.1",
-      "resolved": "https://registry.npmjs.org/@actions/tool-cache/-/tool-cache-1.7.1.tgz",
-      "integrity": "sha512-y1xxxOhXaBUIUit3lhepmu/0xdgiTMpnZRLmVdtF0hTm521doi+MdRRRP62czHvM7wxH6epj4JPNJQ3iJpOrkQ==",
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/@actions/tool-cache/-/tool-cache-2.0.0.tgz",
+      "integrity": "sha512-/5/R16ddC2hw48UO5T/d1gwzsyZjBTttGLV74xnLgfD/7nJKk17ArgCi3YXj3et/IAatMDEqaTM5kw5Hevgd5A==",
       "dependencies": {
         "@actions/core": "^1.2.6",
         "@actions/exec": "^1.0.0",
-        "@actions/http-client": "^1.0.8",
+        "@actions/http-client": "^2.0.0",
         "@actions/io": "^1.1.1",
         "semver": "^6.1.0",
         "uuid": "^3.3.2"
@@ -5072,6 +5078,14 @@
         "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta"
       }
     },
+    "node_modules/tunnel": {
+      "version": "0.0.6",
+      "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
+      "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==",
+      "engines": {
+        "node": ">=0.6.11 <=0.7.0 || >=0.7.3"
+      }
+    },
     "node_modules/type-check": {
       "version": "0.4.0",
       "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
node_modules/@actions/artifact/node_modules/@actions/http-client/LICENSE (new file, 21 lines, generated, vendored)
@@ -0,0 +1,21 @@
+Actions Http Client for Node.js
+
+Copyright (c) GitHub, Inc.
+
+All rights reserved.
+
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
+associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
+LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/@actions/artifact/node_modules/@actions/http-client/README.md (new file, 79 lines, generated, vendored)
@@ -0,0 +1,79 @@
+
+<p align="center">
+<img src="actions.png">
+</p>
+
+# Actions Http-Client
+
+[](https://github.com/actions/http-client/actions)
+
+A lightweight HTTP client optimized for use with actions, TypeScript with generics and async await.
+
+## Features
+
+- HTTP client with TypeScript generics and async/await/Promises
+- Typings included so no need to acquire separately (great for intellisense and no versioning drift)
+- [Proxy support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners#using-a-proxy-server-with-self-hosted-runners) just works with actions and the runner
+- Targets ES2019 (runner runs actions with node 12+). Only supported on node 12+.
+- Basic, Bearer and PAT Support out of the box. Extensible handlers for others.
+- Redirects supported
+
+Features and releases [here](./RELEASES.md)
+
+## Install
+
+```
+npm install @actions/http-client --save
+```
+
+## Samples
+
+See the [HTTP](./__tests__) tests for detailed examples.
+
+## Errors
+
+### HTTP
+
+The HTTP client does not throw unless truly exceptional.
+
+* A request that successfully executes resulting in a 404, 500 etc... will return a response object with a status code and a body.
+* Redirects (3xx) will be followed by default.
+
+See [HTTP tests](./__tests__) for detailed examples.
+
+## Debugging
+
+To enable detailed console logging of all HTTP requests and responses, set the NODE_DEBUG environment varible:
+
+```
+export NODE_DEBUG=http
+```
+
+## Node support
+
+The http-client is built using the latest LTS version of Node 12. It may work on previous node LTS versions but it's tested and officially supported on Node12+.
+
+## Support and Versioning
+
+We follow semver and will hold compatibility between major versions and increment the minor version with new features and capabilities (while holding compat).
+
+## Contributing
+
+We welcome PRs. Please create an issue and if applicable, a design before proceeding with code.
+
+once:
+
+```bash
+$ npm install
+```
+
+To build:
+
+```bash
+$ npm run build
+```
+
+To run all tests:
+```bash
+$ npm test
+```
(Binary image changed: 33 KiB before, 33 KiB after.)
node_modules/@actions/artifact/node_modules/@actions/http-client/package.json (new file, 39 lines, generated, vendored)
@@ -0,0 +1,39 @@
+{
+  "name": "@actions/http-client",
+  "version": "1.0.11",
+  "description": "Actions Http Client",
+  "main": "index.js",
+  "scripts": {
+    "build": "rm -Rf ./_out && tsc && cp package*.json ./_out && cp *.md ./_out && cp LICENSE ./_out && cp actions.png ./_out",
+    "test": "jest",
+    "format": "prettier --write *.ts && prettier --write **/*.ts",
+    "format-check": "prettier --check *.ts && prettier --check **/*.ts",
+    "audit-check": "npm audit --audit-level=moderate"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/actions/http-client.git"
+  },
+  "keywords": [
+    "Actions",
+    "Http"
+  ],
+  "author": "GitHub, Inc.",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/actions/http-client/issues"
+  },
+  "homepage": "https://github.com/actions/http-client#readme",
+  "devDependencies": {
+    "@types/jest": "^25.1.4",
+    "@types/node": "^12.12.31",
+    "jest": "^25.1.0",
+    "prettier": "^2.0.4",
+    "proxy": "^1.0.1",
+    "ts-jest": "^25.2.1",
+    "typescript": "^3.8.3"
+  },
+  "dependencies": {
+    "tunnel": "0.0.6"
+  }
+}
node_modules/@actions/github/node_modules/@actions/http-client/LICENSE (new file, 21 lines, generated, vendored)
@@ -0,0 +1,21 @@
(Contents identical to the LICENSE vendored under node_modules/@actions/artifact/node_modules/@actions/http-client above.)
node_modules/@actions/github/node_modules/@actions/http-client/README.md (new file, 79 lines, generated, vendored)
@@ -0,0 +1,79 @@
(Contents identical to the README.md vendored under node_modules/@actions/artifact/node_modules/@actions/http-client above.)
node_modules/@actions/github/node_modules/@actions/http-client/RELEASES.md (new file, 26 lines, generated, vendored)
@@ -0,0 +1,26 @@
+## Releases
+
+## 1.0.10
+
+Contains a bug fix where proxy is defined without a user and password. see [PR here](https://github.com/actions/http-client/pull/42)
+
+## 1.0.9
+Throw HttpClientError instead of a generic Error from the \<verb>Json() helper methods when the server responds with a non-successful status code.
+
+## 1.0.8
+Fixed security issue where a redirect (e.g. 302) to another domain would pass headers. The fix was to strip the authorization header if the hostname was different. More [details in PR #27](https://github.com/actions/http-client/pull/27)
+
+## 1.0.7
+Update NPM dependencies and add 429 to the list of HttpCodes
+
+## 1.0.6
+Automatically sends Content-Type and Accept application/json headers for \<verb>Json() helper methods if not set in the client or parameters.
+
+## 1.0.5
+Adds \<verb>Json() helper methods for json over http scenarios.
+
+## 1.0.4
+Started to add \<verb>Json() helper methods. Do not use this release for that. Use >= 1.0.5 since there was an issue with types.
+
+## 1.0.1 to 1.0.3
+Adds proxy support.
node_modules/@actions/github/node_modules/@actions/http-client/actions.png (new binary file, generated, vendored)
Binary file not shown. (After: 33 KiB)
node_modules/@actions/github/node_modules/@actions/http-client/auth.d.ts (new file, 23 lines, generated, vendored)
@@ -0,0 +1,23 @@
import ifm = require('./interfaces');
|
||||||
|
export declare class BasicCredentialHandler implements ifm.IRequestHandler {
|
||||||
|
username: string;
|
||||||
|
password: string;
|
||||||
|
constructor(username: string, password: string);
|
||||||
|
prepareRequest(options: any): void;
|
||||||
|
canHandleAuthentication(response: ifm.IHttpClientResponse): boolean;
|
||||||
|
handleAuthentication(httpClient: ifm.IHttpClient, requestInfo: ifm.IRequestInfo, objs: any): Promise<ifm.IHttpClientResponse>;
|
||||||
|
}
|
||||||
|
export declare class BearerCredentialHandler implements ifm.IRequestHandler {
|
||||||
|
token: string;
|
||||||
|
constructor(token: string);
|
||||||
|
prepareRequest(options: any): void;
|
||||||
|
canHandleAuthentication(response: ifm.IHttpClientResponse): boolean;
|
||||||
|
handleAuthentication(httpClient: ifm.IHttpClient, requestInfo: ifm.IRequestInfo, objs: any): Promise<ifm.IHttpClientResponse>;
|
||||||
|
}
|
||||||
|
export declare class PersonalAccessTokenCredentialHandler implements ifm.IRequestHandler {
|
||||||
|
token: string;
|
||||||
|
constructor(token: string);
|
||||||
|
prepareRequest(options: any): void;
|
||||||
|
canHandleAuthentication(response: ifm.IHttpClientResponse): boolean;
|
||||||
|
handleAuthentication(httpClient: ifm.IHttpClient, requestInfo: ifm.IRequestInfo, objs: any): Promise<ifm.IHttpClientResponse>;
|
||||||
|
}
|
||||||
node_modules/@actions/github/node_modules/@actions/http-client/auth.js (new file, 58 lines, generated, vendored)
@@ -0,0 +1,58 @@
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
class BasicCredentialHandler {
|
||||||
|
constructor(username, password) {
|
||||||
|
this.username = username;
|
||||||
|
this.password = password;
|
||||||
|
}
|
||||||
|
prepareRequest(options) {
|
||||||
|
options.headers['Authorization'] =
|
||||||
|
'Basic ' +
|
||||||
|
Buffer.from(this.username + ':' + this.password).toString('base64');
|
||||||
|
}
|
||||||
|
// This handler cannot handle 401
|
||||||
|
canHandleAuthentication(response) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
handleAuthentication(httpClient, requestInfo, objs) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.BasicCredentialHandler = BasicCredentialHandler;
|
||||||
|
class BearerCredentialHandler {
|
||||||
|
constructor(token) {
|
||||||
|
this.token = token;
|
||||||
|
}
|
||||||
|
// currently implements pre-authorization
|
||||||
|
// TODO: support preAuth = false where it hooks on 401
|
||||||
|
prepareRequest(options) {
|
||||||
|
options.headers['Authorization'] = 'Bearer ' + this.token;
|
||||||
|
}
|
||||||
|
// This handler cannot handle 401
|
||||||
|
canHandleAuthentication(response) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
handleAuthentication(httpClient, requestInfo, objs) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.BearerCredentialHandler = BearerCredentialHandler;
|
||||||
|
class PersonalAccessTokenCredentialHandler {
|
||||||
|
constructor(token) {
|
||||||
|
this.token = token;
|
||||||
|
}
|
||||||
|
// currently implements pre-authorization
|
||||||
|
// TODO: support preAuth = false where it hooks on 401
|
||||||
|
prepareRequest(options) {
|
||||||
|
options.headers['Authorization'] =
|
||||||
|
'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
|
||||||
|
}
|
||||||
|
// This handler cannot handle 401
|
||||||
|
canHandleAuthentication(response) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
handleAuthentication(httpClient, requestInfo, objs) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
|
||||||
node_modules/@actions/github/node_modules/@actions/http-client/index.d.ts (new file, 124 lines, generated, vendored)
@@ -0,0 +1,124 @@
/// <reference types="node" />
|
||||||
|
import http = require('http');
|
||||||
|
import ifm = require('./interfaces');
|
||||||
|
export declare enum HttpCodes {
|
||||||
|
OK = 200,
|
||||||
|
MultipleChoices = 300,
|
||||||
|
MovedPermanently = 301,
|
||||||
|
ResourceMoved = 302,
|
||||||
|
SeeOther = 303,
|
||||||
|
NotModified = 304,
|
||||||
|
UseProxy = 305,
|
||||||
|
SwitchProxy = 306,
|
||||||
|
TemporaryRedirect = 307,
|
||||||
|
PermanentRedirect = 308,
|
||||||
|
BadRequest = 400,
|
||||||
|
Unauthorized = 401,
|
||||||
|
PaymentRequired = 402,
|
||||||
|
Forbidden = 403,
|
||||||
|
NotFound = 404,
|
||||||
|
MethodNotAllowed = 405,
|
||||||
|
NotAcceptable = 406,
|
||||||
|
ProxyAuthenticationRequired = 407,
|
||||||
|
RequestTimeout = 408,
|
||||||
|
Conflict = 409,
|
||||||
|
Gone = 410,
|
||||||
|
TooManyRequests = 429,
|
||||||
|
InternalServerError = 500,
|
||||||
|
NotImplemented = 501,
|
||||||
|
BadGateway = 502,
|
||||||
|
ServiceUnavailable = 503,
|
||||||
|
GatewayTimeout = 504
|
||||||
|
}
|
||||||
|
export declare enum Headers {
|
||||||
|
Accept = "accept",
|
||||||
|
ContentType = "content-type"
|
||||||
|
}
|
||||||
|
export declare enum MediaTypes {
|
||||||
|
ApplicationJson = "application/json"
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns the proxy URL, depending upon the supplied url and proxy environment variables.
|
||||||
|
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
||||||
|
*/
|
||||||
|
export declare function getProxyUrl(serverUrl: string): string;
|
||||||
|
export declare class HttpClientError extends Error {
|
||||||
|
constructor(message: string, statusCode: number);
|
||||||
|
statusCode: number;
|
||||||
|
result?: any;
|
||||||
|
}
|
||||||
|
export declare class HttpClientResponse implements ifm.IHttpClientResponse {
|
||||||
|
constructor(message: http.IncomingMessage);
|
||||||
|
message: http.IncomingMessage;
|
||||||
|
readBody(): Promise<string>;
|
||||||
|
}
|
||||||
|
export declare function isHttps(requestUrl: string): boolean;
|
||||||
|
export declare class HttpClient {
|
||||||
|
userAgent: string | undefined;
|
||||||
|
handlers: ifm.IRequestHandler[];
|
||||||
|
requestOptions: ifm.IRequestOptions;
|
||||||
|
private _ignoreSslError;
|
||||||
|
private _socketTimeout;
|
||||||
|
private _allowRedirects;
|
||||||
|
private _allowRedirectDowngrade;
|
||||||
|
private _maxRedirects;
|
||||||
|
private _allowRetries;
|
||||||
|
private _maxRetries;
|
||||||
|
private _agent;
|
||||||
|
private _proxyAgent;
|
||||||
|
private _keepAlive;
|
||||||
|
private _disposed;
|
||||||
|
constructor(userAgent?: string, handlers?: ifm.IRequestHandler[], requestOptions?: ifm.IRequestOptions);
|
||||||
|
options(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||||
|
get(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||||
|
del(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||||
|
post(requestUrl: string, data: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||||
|
patch(requestUrl: string, data: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||||
|
put(requestUrl: string, data: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||||
|
head(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||||
|
sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||||
|
/**
|
||||||
|
* Gets a typed object from an endpoint
|
||||||
|
* Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
|
||||||
|
*/
|
||||||
|
getJson<T>(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
|
||||||
|
postJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
|
||||||
|
putJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
|
||||||
|
patchJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
|
||||||
|
/**
|
||||||
|
* Makes a raw http request.
|
||||||
|
* All other methods such as get, post, patch, and request ultimately call this.
|
||||||
|
* Prefer get, del, post and patch
|
||||||
|
*/
|
||||||
|
request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream, headers: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||||
|
/**
|
||||||
|
* Needs to be called if keepAlive is set to true in request options.
|
||||||
|
*/
|
||||||
|
dispose(): void;
|
||||||
|
/**
|
||||||
|
* Raw request.
|
||||||
|
* @param info
|
||||||
|
* @param data
|
||||||
|
*/
|
||||||
|
requestRaw(info: ifm.IRequestInfo, data: string | NodeJS.ReadableStream): Promise<ifm.IHttpClientResponse>;
|
||||||
|
/**
|
||||||
|
* Raw request with callback.
|
||||||
|
* @param info
|
||||||
|
* @param data
|
||||||
|
* @param onResult
|
||||||
|
*/
|
||||||
|
requestRawWithCallback(info: ifm.IRequestInfo, data: string | NodeJS.ReadableStream, onResult: (err: any, res: ifm.IHttpClientResponse) => void): void;
|
||||||
|
/**
|
||||||
|
* Gets an http agent. This function is useful when you need an http agent that handles
|
||||||
|
* routing through a proxy server - depending upon the url and proxy environment variables.
|
||||||
|
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
||||||
|
*/
|
||||||
|
getAgent(serverUrl: string): http.Agent;
|
||||||
|
private _prepareRequest;
|
||||||
|
private _mergeHeaders;
|
||||||
|
private _getExistingOrDefaultHeader;
|
||||||
|
private _getAgent;
|
||||||
|
private _performExponentialBackoff;
|
||||||
|
private static dateTimeDeserializer;
|
||||||
|
private _processResponse;
|
||||||
|
}
|
||||||
537  node_modules/@actions/github/node_modules/@actions/http-client/index.js  generated  vendored  Normal file
@@ -0,0 +1,537 @@
|
|||||||
|
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const http = require("http");
|
||||||
|
const https = require("https");
|
||||||
|
const pm = require("./proxy");
|
||||||
|
let tunnel;
|
||||||
|
var HttpCodes;
|
||||||
|
(function (HttpCodes) {
|
||||||
|
HttpCodes[HttpCodes["OK"] = 200] = "OK";
|
||||||
|
HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
|
||||||
|
HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
|
||||||
|
HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
|
||||||
|
HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
|
||||||
|
HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
|
||||||
|
HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
|
||||||
|
HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
|
||||||
|
HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
|
||||||
|
HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
|
||||||
|
HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
|
||||||
|
HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
|
||||||
|
HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
|
||||||
|
HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
|
||||||
|
HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
|
||||||
|
HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
|
||||||
|
HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
|
||||||
|
HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
|
||||||
|
HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
|
||||||
|
HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
|
||||||
|
HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
|
||||||
|
HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
|
||||||
|
HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
|
||||||
|
HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
|
||||||
|
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
|
||||||
|
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
|
||||||
|
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
|
||||||
|
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
|
||||||
|
var Headers;
|
||||||
|
(function (Headers) {
|
||||||
|
Headers["Accept"] = "accept";
|
||||||
|
Headers["ContentType"] = "content-type";
|
||||||
|
})(Headers = exports.Headers || (exports.Headers = {}));
|
||||||
|
var MediaTypes;
|
||||||
|
(function (MediaTypes) {
|
||||||
|
MediaTypes["ApplicationJson"] = "application/json";
|
||||||
|
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
|
||||||
|
/**
|
||||||
|
* Returns the proxy URL, depending upon the supplied url and proxy environment variables.
|
||||||
|
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
||||||
|
*/
|
||||||
|
function getProxyUrl(serverUrl) {
|
||||||
|
let proxyUrl = pm.getProxyUrl(new URL(serverUrl));
|
||||||
|
return proxyUrl ? proxyUrl.href : '';
|
||||||
|
}
|
||||||
|
exports.getProxyUrl = getProxyUrl;
|
||||||
|
const HttpRedirectCodes = [
|
||||||
|
HttpCodes.MovedPermanently,
|
||||||
|
HttpCodes.ResourceMoved,
|
||||||
|
HttpCodes.SeeOther,
|
||||||
|
HttpCodes.TemporaryRedirect,
|
||||||
|
HttpCodes.PermanentRedirect
|
||||||
|
];
|
||||||
|
const HttpResponseRetryCodes = [
|
||||||
|
HttpCodes.BadGateway,
|
||||||
|
HttpCodes.ServiceUnavailable,
|
||||||
|
HttpCodes.GatewayTimeout
|
||||||
|
];
|
||||||
|
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
|
||||||
|
const ExponentialBackoffCeiling = 10;
|
||||||
|
const ExponentialBackoffTimeSlice = 5;
|
||||||
|
class HttpClientError extends Error {
|
||||||
|
constructor(message, statusCode) {
|
||||||
|
super(message);
|
||||||
|
this.name = 'HttpClientError';
|
||||||
|
this.statusCode = statusCode;
|
||||||
|
Object.setPrototypeOf(this, HttpClientError.prototype);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.HttpClientError = HttpClientError;
|
||||||
|
class HttpClientResponse {
|
||||||
|
constructor(message) {
|
||||||
|
this.message = message;
|
||||||
|
}
|
||||||
|
readBody() {
|
||||||
|
return new Promise(async (resolve, reject) => {
|
||||||
|
let output = Buffer.alloc(0);
|
||||||
|
this.message.on('data', (chunk) => {
|
||||||
|
output = Buffer.concat([output, chunk]);
|
||||||
|
});
|
||||||
|
this.message.on('end', () => {
|
||||||
|
resolve(output.toString());
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.HttpClientResponse = HttpClientResponse;
|
||||||
|
function isHttps(requestUrl) {
|
||||||
|
let parsedUrl = new URL(requestUrl);
|
||||||
|
return parsedUrl.protocol === 'https:';
|
||||||
|
}
|
||||||
|
exports.isHttps = isHttps;
|
||||||
|
class HttpClient {
|
||||||
|
constructor(userAgent, handlers, requestOptions) {
|
||||||
|
this._ignoreSslError = false;
|
||||||
|
this._allowRedirects = true;
|
||||||
|
this._allowRedirectDowngrade = false;
|
||||||
|
this._maxRedirects = 50;
|
||||||
|
this._allowRetries = false;
|
||||||
|
this._maxRetries = 1;
|
||||||
|
this._keepAlive = false;
|
||||||
|
this._disposed = false;
|
||||||
|
this.userAgent = userAgent;
|
||||||
|
this.handlers = handlers || [];
|
||||||
|
this.requestOptions = requestOptions;
|
||||||
|
if (requestOptions) {
|
||||||
|
if (requestOptions.ignoreSslError != null) {
|
||||||
|
this._ignoreSslError = requestOptions.ignoreSslError;
|
||||||
|
}
|
||||||
|
this._socketTimeout = requestOptions.socketTimeout;
|
||||||
|
if (requestOptions.allowRedirects != null) {
|
||||||
|
this._allowRedirects = requestOptions.allowRedirects;
|
||||||
|
}
|
||||||
|
if (requestOptions.allowRedirectDowngrade != null) {
|
||||||
|
this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
|
||||||
|
}
|
||||||
|
if (requestOptions.maxRedirects != null) {
|
||||||
|
this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
|
||||||
|
}
|
||||||
|
if (requestOptions.keepAlive != null) {
|
||||||
|
this._keepAlive = requestOptions.keepAlive;
|
||||||
|
}
|
||||||
|
if (requestOptions.allowRetries != null) {
|
||||||
|
this._allowRetries = requestOptions.allowRetries;
|
||||||
|
}
|
||||||
|
if (requestOptions.maxRetries != null) {
|
||||||
|
this._maxRetries = requestOptions.maxRetries;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
options(requestUrl, additionalHeaders) {
|
||||||
|
return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
|
||||||
|
}
|
||||||
|
get(requestUrl, additionalHeaders) {
|
||||||
|
return this.request('GET', requestUrl, null, additionalHeaders || {});
|
||||||
|
}
|
||||||
|
del(requestUrl, additionalHeaders) {
|
||||||
|
return this.request('DELETE', requestUrl, null, additionalHeaders || {});
|
||||||
|
}
|
||||||
|
post(requestUrl, data, additionalHeaders) {
|
||||||
|
return this.request('POST', requestUrl, data, additionalHeaders || {});
|
||||||
|
}
|
||||||
|
patch(requestUrl, data, additionalHeaders) {
|
||||||
|
return this.request('PATCH', requestUrl, data, additionalHeaders || {});
|
||||||
|
}
|
||||||
|
put(requestUrl, data, additionalHeaders) {
|
||||||
|
return this.request('PUT', requestUrl, data, additionalHeaders || {});
|
||||||
|
}
|
||||||
|
head(requestUrl, additionalHeaders) {
|
||||||
|
return this.request('HEAD', requestUrl, null, additionalHeaders || {});
|
||||||
|
}
|
||||||
|
sendStream(verb, requestUrl, stream, additionalHeaders) {
|
||||||
|
return this.request(verb, requestUrl, stream, additionalHeaders);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Gets a typed object from an endpoint
|
||||||
|
* Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
|
||||||
|
*/
|
||||||
|
async getJson(requestUrl, additionalHeaders = {}) {
|
||||||
|
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
||||||
|
let res = await this.get(requestUrl, additionalHeaders);
|
||||||
|
return this._processResponse(res, this.requestOptions);
|
||||||
|
}
|
||||||
|
async postJson(requestUrl, obj, additionalHeaders = {}) {
|
||||||
|
let data = JSON.stringify(obj, null, 2);
|
||||||
|
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
||||||
|
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
|
||||||
|
let res = await this.post(requestUrl, data, additionalHeaders);
|
||||||
|
return this._processResponse(res, this.requestOptions);
|
||||||
|
}
|
||||||
|
async putJson(requestUrl, obj, additionalHeaders = {}) {
|
||||||
|
let data = JSON.stringify(obj, null, 2);
|
||||||
|
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
||||||
|
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
|
||||||
|
let res = await this.put(requestUrl, data, additionalHeaders);
|
||||||
|
return this._processResponse(res, this.requestOptions);
|
||||||
|
}
|
||||||
|
async patchJson(requestUrl, obj, additionalHeaders = {}) {
|
||||||
|
let data = JSON.stringify(obj, null, 2);
|
||||||
|
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
||||||
|
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
|
||||||
|
let res = await this.patch(requestUrl, data, additionalHeaders);
|
||||||
|
return this._processResponse(res, this.requestOptions);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Makes a raw http request.
|
||||||
|
* All other methods such as get, post, patch, and request ultimately call this.
|
||||||
|
* Prefer get, del, post and patch
|
||||||
|
*/
|
||||||
|
async request(verb, requestUrl, data, headers) {
|
||||||
|
if (this._disposed) {
|
||||||
|
throw new Error('Client has already been disposed.');
|
||||||
|
}
|
||||||
|
let parsedUrl = new URL(requestUrl);
|
||||||
|
let info = this._prepareRequest(verb, parsedUrl, headers);
|
||||||
|
// Only perform retries on reads since writes may not be idempotent.
|
||||||
|
let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
|
||||||
|
? this._maxRetries + 1
|
||||||
|
: 1;
|
||||||
|
let numTries = 0;
|
||||||
|
let response;
|
||||||
|
while (numTries < maxTries) {
|
||||||
|
response = await this.requestRaw(info, data);
|
||||||
|
// Check if it's an authentication challenge
|
||||||
|
if (response &&
|
||||||
|
response.message &&
|
||||||
|
response.message.statusCode === HttpCodes.Unauthorized) {
|
||||||
|
let authenticationHandler;
|
||||||
|
for (let i = 0; i < this.handlers.length; i++) {
|
||||||
|
if (this.handlers[i].canHandleAuthentication(response)) {
|
||||||
|
authenticationHandler = this.handlers[i];
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (authenticationHandler) {
|
||||||
|
return authenticationHandler.handleAuthentication(this, info, data);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
// We have received an unauthorized response but have no handlers to handle it.
|
||||||
|
// Let the response return to the caller.
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let redirectsRemaining = this._maxRedirects;
|
||||||
|
while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
|
||||||
|
this._allowRedirects &&
|
||||||
|
redirectsRemaining > 0) {
|
||||||
|
const redirectUrl = response.message.headers['location'];
|
||||||
|
if (!redirectUrl) {
|
||||||
|
// if there's no location to redirect to, we won't
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
let parsedRedirectUrl = new URL(redirectUrl);
|
||||||
|
if (parsedUrl.protocol == 'https:' &&
|
||||||
|
parsedUrl.protocol != parsedRedirectUrl.protocol &&
|
||||||
|
!this._allowRedirectDowngrade) {
|
||||||
|
throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
|
||||||
|
}
|
||||||
|
// we need to finish reading the response before reassigning response
|
||||||
|
// which will leak the open socket.
|
||||||
|
await response.readBody();
|
||||||
|
// strip authorization header if redirected to a different hostname
|
||||||
|
if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
|
||||||
|
for (let header in headers) {
|
||||||
|
// header names are case insensitive
|
||||||
|
if (header.toLowerCase() === 'authorization') {
|
||||||
|
delete headers[header];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// let's make the request with the new redirectUrl
|
||||||
|
info = this._prepareRequest(verb, parsedRedirectUrl, headers);
|
||||||
|
response = await this.requestRaw(info, data);
|
||||||
|
redirectsRemaining--;
|
||||||
|
}
|
||||||
|
if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
|
||||||
|
// If not a retry code, return immediately instead of retrying
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
numTries += 1;
|
||||||
|
if (numTries < maxTries) {
|
||||||
|
await response.readBody();
|
||||||
|
await this._performExponentialBackoff(numTries);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Needs to be called if keepAlive is set to true in request options.
|
||||||
|
*/
|
||||||
|
dispose() {
|
||||||
|
if (this._agent) {
|
||||||
|
this._agent.destroy();
|
||||||
|
}
|
||||||
|
this._disposed = true;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Raw request.
|
||||||
|
* @param info
|
||||||
|
* @param data
|
||||||
|
*/
|
||||||
|
requestRaw(info, data) {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
let callbackForResult = function (err, res) {
|
||||||
|
if (err) {
|
||||||
|
reject(err);
|
||||||
|
}
|
||||||
|
resolve(res);
|
||||||
|
};
|
||||||
|
this.requestRawWithCallback(info, data, callbackForResult);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Raw request with callback.
|
||||||
|
* @param info
|
||||||
|
* @param data
|
||||||
|
* @param onResult
|
||||||
|
*/
|
||||||
|
requestRawWithCallback(info, data, onResult) {
|
||||||
|
let socket;
|
||||||
|
if (typeof data === 'string') {
|
||||||
|
info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
|
||||||
|
}
|
||||||
|
let callbackCalled = false;
|
||||||
|
let handleResult = (err, res) => {
|
||||||
|
if (!callbackCalled) {
|
||||||
|
callbackCalled = true;
|
||||||
|
onResult(err, res);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let req = info.httpModule.request(info.options, (msg) => {
|
||||||
|
let res = new HttpClientResponse(msg);
|
||||||
|
handleResult(null, res);
|
||||||
|
});
|
||||||
|
req.on('socket', sock => {
|
||||||
|
socket = sock;
|
||||||
|
});
|
||||||
|
// If we ever get disconnected, we want the socket to timeout eventually
|
||||||
|
req.setTimeout(this._socketTimeout || 3 * 60000, () => {
|
||||||
|
if (socket) {
|
||||||
|
socket.end();
|
||||||
|
}
|
||||||
|
handleResult(new Error('Request timeout: ' + info.options.path), null);
|
||||||
|
});
|
||||||
|
req.on('error', function (err) {
|
||||||
|
// err has statusCode property
|
||||||
|
// res should have headers
|
||||||
|
handleResult(err, null);
|
||||||
|
});
|
||||||
|
if (data && typeof data === 'string') {
|
||||||
|
req.write(data, 'utf8');
|
||||||
|
}
|
||||||
|
if (data && typeof data !== 'string') {
|
||||||
|
data.on('close', function () {
|
||||||
|
req.end();
|
||||||
|
});
|
||||||
|
data.pipe(req);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
req.end();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Gets an http agent. This function is useful when you need an http agent that handles
|
||||||
|
* routing through a proxy server - depending upon the url and proxy environment variables.
|
||||||
|
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
||||||
|
*/
|
||||||
|
getAgent(serverUrl) {
|
||||||
|
let parsedUrl = new URL(serverUrl);
|
||||||
|
return this._getAgent(parsedUrl);
|
||||||
|
}
|
||||||
|
_prepareRequest(method, requestUrl, headers) {
|
||||||
|
const info = {};
|
||||||
|
info.parsedUrl = requestUrl;
|
||||||
|
const usingSsl = info.parsedUrl.protocol === 'https:';
|
||||||
|
info.httpModule = usingSsl ? https : http;
|
||||||
|
const defaultPort = usingSsl ? 443 : 80;
|
||||||
|
info.options = {};
|
||||||
|
info.options.host = info.parsedUrl.hostname;
|
||||||
|
info.options.port = info.parsedUrl.port
|
||||||
|
? parseInt(info.parsedUrl.port)
|
||||||
|
: defaultPort;
|
||||||
|
info.options.path =
|
||||||
|
(info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
|
||||||
|
info.options.method = method;
|
||||||
|
info.options.headers = this._mergeHeaders(headers);
|
||||||
|
if (this.userAgent != null) {
|
||||||
|
info.options.headers['user-agent'] = this.userAgent;
|
||||||
|
}
|
||||||
|
info.options.agent = this._getAgent(info.parsedUrl);
|
||||||
|
// gives handlers an opportunity to participate
|
||||||
|
if (this.handlers) {
|
||||||
|
this.handlers.forEach(handler => {
|
||||||
|
handler.prepareRequest(info.options);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return info;
|
||||||
|
}
|
||||||
|
_mergeHeaders(headers) {
|
||||||
|
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
|
||||||
|
if (this.requestOptions && this.requestOptions.headers) {
|
||||||
|
return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
|
||||||
|
}
|
||||||
|
return lowercaseKeys(headers || {});
|
||||||
|
}
|
||||||
|
_getExistingOrDefaultHeader(additionalHeaders, header, _default) {
|
||||||
|
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
|
||||||
|
let clientHeader;
|
||||||
|
if (this.requestOptions && this.requestOptions.headers) {
|
||||||
|
clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
|
||||||
|
}
|
||||||
|
return additionalHeaders[header] || clientHeader || _default;
|
||||||
|
}
|
||||||
|
_getAgent(parsedUrl) {
|
||||||
|
let agent;
|
||||||
|
let proxyUrl = pm.getProxyUrl(parsedUrl);
|
||||||
|
let useProxy = proxyUrl && proxyUrl.hostname;
|
||||||
|
if (this._keepAlive && useProxy) {
|
||||||
|
agent = this._proxyAgent;
|
||||||
|
}
|
||||||
|
if (this._keepAlive && !useProxy) {
|
||||||
|
agent = this._agent;
|
||||||
|
}
|
||||||
|
// if agent is already assigned use that agent.
|
||||||
|
if (!!agent) {
|
||||||
|
return agent;
|
||||||
|
}
|
||||||
|
const usingSsl = parsedUrl.protocol === 'https:';
|
||||||
|
let maxSockets = 100;
|
||||||
|
if (!!this.requestOptions) {
|
||||||
|
maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
|
||||||
|
}
|
||||||
|
if (useProxy) {
|
||||||
|
// If using proxy, need tunnel
|
||||||
|
if (!tunnel) {
|
||||||
|
tunnel = require('tunnel');
|
||||||
|
}
|
||||||
|
const agentOptions = {
|
||||||
|
maxSockets: maxSockets,
|
||||||
|
keepAlive: this._keepAlive,
|
||||||
|
proxy: {
|
||||||
|
...((proxyUrl.username || proxyUrl.password) && {
|
||||||
|
proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
|
||||||
|
}),
|
||||||
|
host: proxyUrl.hostname,
|
||||||
|
port: proxyUrl.port
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let tunnelAgent;
|
||||||
|
const overHttps = proxyUrl.protocol === 'https:';
|
||||||
|
if (usingSsl) {
|
||||||
|
tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
|
||||||
|
}
|
||||||
|
agent = tunnelAgent(agentOptions);
|
||||||
|
this._proxyAgent = agent;
|
||||||
|
}
|
||||||
|
// if reusing agent across request and tunneling agent isn't assigned create a new agent
|
||||||
|
if (this._keepAlive && !agent) {
|
||||||
|
const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
|
||||||
|
agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
|
||||||
|
this._agent = agent;
|
||||||
|
}
|
||||||
|
// if not using private agent and tunnel agent isn't setup then use global agent
|
||||||
|
if (!agent) {
|
||||||
|
agent = usingSsl ? https.globalAgent : http.globalAgent;
|
||||||
|
}
|
||||||
|
if (usingSsl && this._ignoreSslError) {
|
||||||
|
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
|
||||||
|
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
|
||||||
|
// we have to cast it to any and change it directly
|
||||||
|
agent.options = Object.assign(agent.options || {}, {
|
||||||
|
rejectUnauthorized: false
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return agent;
|
||||||
|
}
|
||||||
|
_performExponentialBackoff(retryNumber) {
|
||||||
|
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
|
||||||
|
const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
|
||||||
|
return new Promise(resolve => setTimeout(() => resolve(), ms));
|
||||||
|
}
|
||||||
|
static dateTimeDeserializer(key, value) {
|
||||||
|
if (typeof value === 'string') {
|
||||||
|
let a = new Date(value);
|
||||||
|
if (!isNaN(a.valueOf())) {
|
||||||
|
return a;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
async _processResponse(res, options) {
|
||||||
|
return new Promise(async (resolve, reject) => {
|
||||||
|
const statusCode = res.message.statusCode;
|
||||||
|
const response = {
|
||||||
|
statusCode: statusCode,
|
||||||
|
result: null,
|
||||||
|
headers: {}
|
||||||
|
};
|
||||||
|
// not found leads to null obj returned
|
||||||
|
if (statusCode == HttpCodes.NotFound) {
|
||||||
|
resolve(response);
|
||||||
|
}
|
||||||
|
let obj;
|
||||||
|
let contents;
|
||||||
|
// get the result from the body
|
||||||
|
try {
|
||||||
|
contents = await res.readBody();
|
||||||
|
if (contents && contents.length > 0) {
|
||||||
|
if (options && options.deserializeDates) {
|
||||||
|
obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
obj = JSON.parse(contents);
|
||||||
|
}
|
||||||
|
response.result = obj;
|
||||||
|
}
|
||||||
|
response.headers = res.message.headers;
|
||||||
|
}
|
||||||
|
catch (err) {
|
||||||
|
// Invalid resource (contents not json); leaving result obj null
|
||||||
|
}
|
||||||
|
// note that 3xx redirects are handled by the http layer.
|
||||||
|
if (statusCode > 299) {
|
||||||
|
let msg;
|
||||||
|
// if exception/error in body, attempt to get better error
|
||||||
|
if (obj && obj.message) {
|
||||||
|
msg = obj.message;
|
||||||
|
}
|
||||||
|
else if (contents && contents.length > 0) {
|
||||||
|
// it may be the case that the exception is in the body message as string
|
||||||
|
msg = contents;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
msg = 'Failed request: (' + statusCode + ')';
|
||||||
|
}
|
||||||
|
let err = new HttpClientError(msg, statusCode);
|
||||||
|
err.result = response.result;
|
||||||
|
reject(err);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
resolve(response);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.HttpClient = HttpClient;
|
||||||
49  node_modules/@actions/github/node_modules/@actions/http-client/interfaces.d.ts  generated  vendored  Normal file
@@ -0,0 +1,49 @@
/// <reference types="node" />
import http = require('http');
export interface IHeaders {
    [key: string]: any;
}
export interface IHttpClient {
    options(requestUrl: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
    get(requestUrl: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
    del(requestUrl: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
    post(requestUrl: string, data: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
    patch(requestUrl: string, data: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
    put(requestUrl: string, data: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
    sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
    request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream, headers: IHeaders): Promise<IHttpClientResponse>;
    requestRaw(info: IRequestInfo, data: string | NodeJS.ReadableStream): Promise<IHttpClientResponse>;
    requestRawWithCallback(info: IRequestInfo, data: string | NodeJS.ReadableStream, onResult: (err: any, res: IHttpClientResponse) => void): void;
}
export interface IRequestHandler {
    prepareRequest(options: http.RequestOptions): void;
    canHandleAuthentication(response: IHttpClientResponse): boolean;
    handleAuthentication(httpClient: IHttpClient, requestInfo: IRequestInfo, objs: any): Promise<IHttpClientResponse>;
}
export interface IHttpClientResponse {
    message: http.IncomingMessage;
    readBody(): Promise<string>;
}
export interface IRequestInfo {
    options: http.RequestOptions;
    parsedUrl: URL;
    httpModule: any;
}
export interface IRequestOptions {
    headers?: IHeaders;
    socketTimeout?: number;
    ignoreSslError?: boolean;
    allowRedirects?: boolean;
    allowRedirectDowngrade?: boolean;
    maxRedirects?: number;
    maxSockets?: number;
    keepAlive?: boolean;
    deserializeDates?: boolean;
    allowRetries?: boolean;
    maxRetries?: number;
}
export interface ITypedResponse<T> {
    statusCode: number;
    result: T | null;
    headers: Object;
}
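The IRequestHandler interface above is the extension point that the credential handlers in auth.js implement. A minimal sketch of a custom handler follows; the ApiKeyHandler name and X-Api-Key header are invented for illustration and are not part of the vendored package.

import * as http from 'http';
import * as ifm from '@actions/http-client/interfaces';

// Hypothetical handler: attaches an API key header to every outgoing request.
class ApiKeyHandler implements ifm.IRequestHandler {
    constructor(private key: string) {}

    prepareRequest(options: http.RequestOptions): void {
        options.headers = options.headers || {};
        options.headers['X-Api-Key'] = this.key;
    }

    // Returning false tells HttpClient.request() not to re-dispatch 401 responses here.
    canHandleAuthentication(): boolean {
        return false;
    }

    handleAuthentication(): Promise<ifm.IHttpClientResponse> {
        throw new Error('not implemented');
    }
}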
2  node_modules/@actions/github/node_modules/@actions/http-client/interfaces.js  generated  vendored  Normal file
@@ -0,0 +1,2 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
39  node_modules/@actions/github/node_modules/@actions/http-client/package.json  generated  vendored  Normal file
@@ -0,0 +1,39 @@
{
  "name": "@actions/http-client",
  "version": "1.0.11",
  "description": "Actions Http Client",
  "main": "index.js",
  "scripts": {
    "build": "rm -Rf ./_out && tsc && cp package*.json ./_out && cp *.md ./_out && cp LICENSE ./_out && cp actions.png ./_out",
    "test": "jest",
    "format": "prettier --write *.ts && prettier --write **/*.ts",
    "format-check": "prettier --check *.ts && prettier --check **/*.ts",
    "audit-check": "npm audit --audit-level=moderate"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/actions/http-client.git"
  },
  "keywords": [
    "Actions",
    "Http"
  ],
  "author": "GitHub, Inc.",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/actions/http-client/issues"
  },
  "homepage": "https://github.com/actions/http-client#readme",
  "devDependencies": {
    "@types/jest": "^25.1.4",
    "@types/node": "^12.12.31",
    "jest": "^25.1.0",
    "prettier": "^2.0.4",
    "proxy": "^1.0.1",
    "ts-jest": "^25.2.1",
    "typescript": "^3.8.3"
  },
  "dependencies": {
    "tunnel": "0.0.6"
  }
}
2  node_modules/@actions/github/node_modules/@actions/http-client/proxy.d.ts  generated  vendored  Normal file
@@ -0,0 +1,2 @@
export declare function getProxyUrl(reqUrl: URL): URL | undefined;
export declare function checkBypass(reqUrl: URL): boolean;
57  node_modules/@actions/github/node_modules/@actions/http-client/proxy.js  generated  vendored  Normal file
@@ -0,0 +1,57 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function getProxyUrl(reqUrl) {
    let usingSsl = reqUrl.protocol === 'https:';
    let proxyUrl;
    if (checkBypass(reqUrl)) {
        return proxyUrl;
    }
    let proxyVar;
    if (usingSsl) {
        proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];
    }
    else {
        proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];
    }
    if (proxyVar) {
        proxyUrl = new URL(proxyVar);
    }
    return proxyUrl;
}
exports.getProxyUrl = getProxyUrl;
function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
    // Determine the request port
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    // Format the request hostname and hostname with port
    let upperReqHosts = [reqUrl.hostname.toUpperCase()];
    if (typeof reqPort === 'number') {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    // Compare request host against noproxy
    for (let upperNoProxyItem of noProxy
        .split(',')
        .map(x => x.trim().toUpperCase())
        .filter(x => x)) {
        if (upperReqHosts.some(x => x === upperNoProxyItem)) {
            return true;
        }
    }
    return false;
}
exports.checkBypass = checkBypass;
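For reference, a short sketch of how the helpers above resolve the environment; the module path assumes the 1.x root layout vendored here, and the proxy and no_proxy values are hypothetical.

import * as pm from '@actions/http-client/proxy';

// Hypothetical environment, for illustration only.
process.env['https_proxy'] = 'http://proxy.example.com:8080';
process.env['no_proxy'] = 'internal.example.com';

// Proxied: the hostname is not listed in no_proxy.
const proxyUrl = pm.getProxyUrl(new URL('https://api.github.com'));
console.log(proxyUrl && proxyUrl.href); // http://proxy.example.com:8080/

// Bypassed: checkBypass compares hostnames (and host:port) case-insensitively.
console.log(pm.checkBypass(new URL('https://internal.example.com/path'))); // true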
32  node_modules/@actions/http-client/README.md  generated  vendored
@@ -1,18 +1,11 @@
-
-<p align="center">
-<img src="actions.png">
-</p>
-
-# Actions Http-Client
-
-[](https://github.com/actions/http-client/actions)
-
-A lightweight HTTP client optimized for use with actions, TypeScript with generics and async await.
+# `@actions/http-client`
+
+A lightweight HTTP client optimized for building actions.
 
 ## Features
 
 - HTTP client with TypeScript generics and async/await/Promises
-- Typings included so no need to acquire separately (great for intellisense and no versioning drift)
+- Typings included!
 - [Proxy support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners#using-a-proxy-server-with-self-hosted-runners) just works with actions and the runner
 - Targets ES2019 (runner runs actions with node 12+). Only supported on node 12+.
 - Basic, Bearer and PAT Support out of the box. Extensible handlers for others.
@@ -28,7 +21,7 @@ npm install @actions/http-client --save
 
 ## Samples
 
-See the [HTTP](./__tests__) tests for detailed examples.
+See the [tests](./__tests__) for detailed examples.
 
 ## Errors
 
@@ -39,13 +32,13 @@ The HTTP client does not throw unless truly exceptional.
 * A request that successfully executes resulting in a 404, 500 etc... will return a response object with a status code and a body.
 * Redirects (3xx) will be followed by default.
 
-See [HTTP tests](./__tests__) for detailed examples.
+See the [tests](./__tests__) for detailed examples.
 
 ## Debugging
 
 To enable detailed console logging of all HTTP requests and responses, set the NODE_DEBUG environment varible:
 
-```
+```shell
 export NODE_DEBUG=http
 ```
 
@@ -63,17 +56,18 @@ We welcome PRs. Please create an issue and if applicable, a design before proce
 
 once:
 
-```bash
-$ npm install
+```
+npm install
 ```
 
 To build:
 
-```bash
-$ npm run build
+```
+npm run build
 ```
 
 To run all tests:
-```bash
-$ npm test
+
+```
+npm test
 ```
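A minimal usage sketch of the client this README describes, using the 2.x typed-JSON API; the endpoint and the Release shape are illustrative only.

import * as httpm from '@actions/http-client';

// Hypothetical response shape for the example endpoint.
interface Release {
    tag_name: string;
}

async function latestTag(): Promise<string | null> {
    const client = new httpm.HttpClient('http-client-example');
    // getJson resolves with a typed result; a 404 yields result === null,
    // while other 4xx/5xx responses reject the promise.
    const res = await client.getJson<Release>(
        'https://api.github.com/repos/github/codeql-action/releases/latest'
    );
    return res.result ? res.result.tag_name : null;
}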
26  node_modules/@actions/http-client/lib/auth.d.ts  generated  vendored  Normal file
@@ -0,0 +1,26 @@
/// <reference types="node" />
import * as http from 'http';
import * as ifm from './interfaces';
import { HttpClientResponse } from './index';
export declare class BasicCredentialHandler implements ifm.RequestHandler {
    username: string;
    password: string;
    constructor(username: string, password: string);
    prepareRequest(options: http.RequestOptions): void;
    canHandleAuthentication(): boolean;
    handleAuthentication(): Promise<HttpClientResponse>;
}
export declare class BearerCredentialHandler implements ifm.RequestHandler {
    token: string;
    constructor(token: string);
    prepareRequest(options: http.RequestOptions): void;
    canHandleAuthentication(): boolean;
    handleAuthentication(): Promise<HttpClientResponse>;
}
export declare class PersonalAccessTokenCredentialHandler implements ifm.RequestHandler {
    token: string;
    constructor(token: string);
    prepareRequest(options: http.RequestOptions): void;
    canHandleAuthentication(): boolean;
    handleAuthentication(): Promise<HttpClientResponse>;
}
81  node_modules/@actions/http-client/lib/auth.js  generated  vendored  Normal file
@@ -0,0 +1,81 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;
class BasicCredentialHandler {
    constructor(username, password) {
        this.username = username;
        this.password = password;
    }
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        return __awaiter(this, void 0, void 0, function* () {
            throw new Error('not implemented');
        });
    }
}
exports.BasicCredentialHandler = BasicCredentialHandler;
class BearerCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        options.headers['Authorization'] = `Bearer ${this.token}`;
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        return __awaiter(this, void 0, void 0, function* () {
            throw new Error('not implemented');
        });
    }
}
exports.BearerCredentialHandler = BearerCredentialHandler;
class PersonalAccessTokenCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        return __awaiter(this, void 0, void 0, function* () {
            throw new Error('not implemented');
        });
    }
}
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
//# sourceMappingURL=auth.js.map
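A short sketch of wiring one of these handlers into HttpClient; the import path assumes the 2.x lib/ layout shown above, and the token source is a placeholder.

import * as httpm from '@actions/http-client';
import { BearerCredentialHandler } from '@actions/http-client/lib/auth';

async function whoAmI(): Promise<number | undefined> {
    const token = process.env['GITHUB_TOKEN'] || '';
    // prepareRequest() runs before every request and sets the Authorization header;
    // since canHandleAuthentication() returns false, the handler never retries a 401 itself.
    const client = new httpm.HttpClient('http-client-example', [
        new BearerCredentialHandler(token)
    ]);
    const res = await client.get('https://api.github.com/user');
    return res.message.statusCode;
}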
1  node_modules/@actions/http-client/lib/auth.js.map  generated  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"auth.js","sourceRoot":"","sources":["../src/auth.ts"],"names":[],"mappings":";;;;;;;;;;;;AAIA,MAAa,sBAAsB;IAIjC,YAAY,QAAgB,EAAE,QAAgB;QAC5C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;IAC1B,CAAC;IAED,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,EAAE,CACpC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA1BD,wDA0BC;AAED,MAAa,uBAAuB;IAGlC,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,IAAI,CAAC,KAAK,EAAE,CAAA;IAC3D,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AAxBD,0DAwBC;AAED,MAAa,oCAAoC;IAI/C,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,OAAO,IAAI,CAAC,KAAK,EAAE,CACpB,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA3BD,oFA2BC"}
123  node_modules/@actions/http-client/lib/index.d.ts  generated  vendored  Normal file
@@ -0,0 +1,123 @@
/// <reference types="node" />
import * as http from 'http';
import * as ifm from './interfaces';
export declare enum HttpCodes {
    OK = 200,
    MultipleChoices = 300,
    MovedPermanently = 301,
    ResourceMoved = 302,
    SeeOther = 303,
    NotModified = 304,
    UseProxy = 305,
    SwitchProxy = 306,
    TemporaryRedirect = 307,
    PermanentRedirect = 308,
    BadRequest = 400,
    Unauthorized = 401,
    PaymentRequired = 402,
    Forbidden = 403,
    NotFound = 404,
    MethodNotAllowed = 405,
    NotAcceptable = 406,
    ProxyAuthenticationRequired = 407,
    RequestTimeout = 408,
    Conflict = 409,
    Gone = 410,
    TooManyRequests = 429,
    InternalServerError = 500,
    NotImplemented = 501,
    BadGateway = 502,
    ServiceUnavailable = 503,
    GatewayTimeout = 504
}
export declare enum Headers {
    Accept = "accept",
    ContentType = "content-type"
}
export declare enum MediaTypes {
    ApplicationJson = "application/json"
}
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 */
export declare function getProxyUrl(serverUrl: string): string;
export declare class HttpClientError extends Error {
    constructor(message: string, statusCode: number);
    statusCode: number;
    result?: any;
}
export declare class HttpClientResponse {
    constructor(message: http.IncomingMessage);
    message: http.IncomingMessage;
    readBody(): Promise<string>;
}
export declare function isHttps(requestUrl: string): boolean;
export declare class HttpClient {
    userAgent: string | undefined;
    handlers: ifm.RequestHandler[];
    requestOptions: ifm.RequestOptions | undefined;
    private _ignoreSslError;
    private _socketTimeout;
    private _allowRedirects;
    private _allowRedirectDowngrade;
    private _maxRedirects;
    private _allowRetries;
    private _maxRetries;
    private _agent;
    private _proxyAgent;
    private _keepAlive;
    private _disposed;
    constructor(userAgent?: string, handlers?: ifm.RequestHandler[], requestOptions?: ifm.RequestOptions);
    options(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    get(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    del(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    post(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    patch(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    put(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    head(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    /**
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
    getJson<T>(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>;
    postJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>;
    putJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>;
    patchJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>;
    /**
     * Makes a raw http request.
     * All other methods such as get, post, patch, and request ultimately call this.
     * Prefer get, del, post and patch
     */
    request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream | null, headers?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
    /**
     * Needs to be called if keepAlive is set to true in request options.
     */
    dispose(): void;
    /**
     * Raw request.
     * @param info
     * @param data
     */
    requestRaw(info: ifm.RequestInfo, data: string | NodeJS.ReadableStream | null): Promise<HttpClientResponse>;
    /**
     * Raw request with callback.
     * @param info
     * @param data
     * @param onResult
     */
    requestRawWithCallback(info: ifm.RequestInfo, data: string | NodeJS.ReadableStream | null, onResult: (err?: Error, res?: HttpClientResponse) => void): void;
    /**
     * Gets an http agent. This function is useful when you need an http agent that handles
     * routing through a proxy server - depending upon the url and proxy environment variables.
     * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
     */
    getAgent(serverUrl: string): http.Agent;
    private _prepareRequest;
    private _mergeHeaders;
    private _getExistingOrDefaultHeader;
    private _getAgent;
    private _performExponentialBackoff;
    private _processResponse;
}
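The dispose() note above matters when keepAlive is enabled; a brief sketch, with the user agent string and URL purely illustrative.

import * as httpm from '@actions/http-client';

async function fetchTwice(): Promise<void> {
    const client = new httpm.HttpClient('http-client-example', [], { keepAlive: true });
    try {
        // With keepAlive the underlying agent keeps sockets open between requests.
        await (await client.get('https://api.github.com/rate_limit')).readBody();
        await (await client.get('https://api.github.com/rate_limit')).readBody();
    } finally {
        // Required when keepAlive is true, otherwise open sockets keep the process alive.
        client.dispose();
    }
}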
605  node_modules/@actions/http-client/lib/index.js  generated  vendored  Normal file
@@ -0,0 +1,605 @@
|
|||||||
|
"use strict";
|
||||||
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||||||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||||
|
});
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
|
||||||
|
const http = __importStar(require("http"));
|
||||||
|
const https = __importStar(require("https"));
|
||||||
|
const pm = __importStar(require("./proxy"));
|
||||||
|
const tunnel = __importStar(require("tunnel"));
|
||||||
|
var HttpCodes;
|
||||||
|
(function (HttpCodes) {
|
||||||
|
HttpCodes[HttpCodes["OK"] = 200] = "OK";
|
||||||
|
HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
|
||||||
|
HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
|
||||||
|
HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
|
||||||
|
HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
|
||||||
|
HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
|
||||||
|
HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
|
||||||
|
HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
|
||||||
|
HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
|
||||||
|
HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
|
||||||
|
HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
|
||||||
|
HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
|
||||||
|
HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
|
||||||
|
HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
|
||||||
|
HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
|
||||||
|
HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
|
||||||
|
HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
|
||||||
|
HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
|
||||||
|
HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
|
||||||
|
HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
|
||||||
|
HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
|
||||||
|
HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
|
||||||
|
HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
|
||||||
|
HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
|
||||||
|
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
|
||||||
|
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
|
||||||
|
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
|
||||||
|
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
|
||||||
|
var Headers;
|
||||||
|
(function (Headers) {
|
||||||
|
Headers["Accept"] = "accept";
|
||||||
|
Headers["ContentType"] = "content-type";
|
||||||
|
})(Headers = exports.Headers || (exports.Headers = {}));
|
||||||
|
var MediaTypes;
|
||||||
|
(function (MediaTypes) {
|
||||||
|
MediaTypes["ApplicationJson"] = "application/json";
|
||||||
|
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
|
||||||
|
/**
|
||||||
|
* Returns the proxy URL, depending upon the supplied url and proxy environment variables.
|
||||||
|
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
||||||
|
*/
|
||||||
|
function getProxyUrl(serverUrl) {
|
||||||
|
const proxyUrl = pm.getProxyUrl(new URL(serverUrl));
|
||||||
|
return proxyUrl ? proxyUrl.href : '';
|
||||||
|
}
|
||||||
|
exports.getProxyUrl = getProxyUrl;
const HttpRedirectCodes = [
HttpCodes.MovedPermanently,
HttpCodes.ResourceMoved,
HttpCodes.SeeOther,
HttpCodes.TemporaryRedirect,
HttpCodes.PermanentRedirect
];
const HttpResponseRetryCodes = [
HttpCodes.BadGateway,
HttpCodes.ServiceUnavailable,
HttpCodes.GatewayTimeout
];
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
class HttpClientError extends Error {
constructor(message, statusCode) {
super(message);
this.name = 'HttpClientError';
this.statusCode = statusCode;
Object.setPrototypeOf(this, HttpClientError.prototype);
}
}
exports.HttpClientError = HttpClientError;
class HttpClientResponse {
constructor(message) {
this.message = message;
}
readBody() {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
let output = Buffer.alloc(0);
this.message.on('data', (chunk) => {
output = Buffer.concat([output, chunk]);
});
this.message.on('end', () => {
resolve(output.toString());
});
}));
});
}
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
const parsedUrl = new URL(requestUrl);
return parsedUrl.protocol === 'https:';
}
exports.isHttps = isHttps;
class HttpClient {
constructor(userAgent, handlers, requestOptions) {
this._ignoreSslError = false;
this._allowRedirects = true;
this._allowRedirectDowngrade = false;
this._maxRedirects = 50;
this._allowRetries = false;
this._maxRetries = 1;
this._keepAlive = false;
this._disposed = false;
this.userAgent = userAgent;
this.handlers = handlers || [];
this.requestOptions = requestOptions;
if (requestOptions) {
if (requestOptions.ignoreSslError != null) {
this._ignoreSslError = requestOptions.ignoreSslError;
}
this._socketTimeout = requestOptions.socketTimeout;
if (requestOptions.allowRedirects != null) {
this._allowRedirects = requestOptions.allowRedirects;
}
if (requestOptions.allowRedirectDowngrade != null) {
this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
}
if (requestOptions.maxRedirects != null) {
this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
}
if (requestOptions.keepAlive != null) {
this._keepAlive = requestOptions.keepAlive;
}
if (requestOptions.allowRetries != null) {
this._allowRetries = requestOptions.allowRetries;
}
if (requestOptions.maxRetries != null) {
this._maxRetries = requestOptions.maxRetries;
}
}
}
options(requestUrl, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
});
}
get(requestUrl, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('GET', requestUrl, null, additionalHeaders || {});
});
}
del(requestUrl, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('DELETE', requestUrl, null, additionalHeaders || {});
});
}
post(requestUrl, data, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('POST', requestUrl, data, additionalHeaders || {});
});
}
patch(requestUrl, data, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('PATCH', requestUrl, data, additionalHeaders || {});
});
}
put(requestUrl, data, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('PUT', requestUrl, data, additionalHeaders || {});
});
}
head(requestUrl, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('HEAD', requestUrl, null, additionalHeaders || {});
});
}
sendStream(verb, requestUrl, stream, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request(verb, requestUrl, stream, additionalHeaders);
});
}
/**
* Gets a typed object from an endpoint
* Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
*/
getJson(requestUrl, additionalHeaders = {}) {
return __awaiter(this, void 0, void 0, function* () {
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
const res = yield this.get(requestUrl, additionalHeaders);
return this._processResponse(res, this.requestOptions);
});
}
postJson(requestUrl, obj, additionalHeaders = {}) {
return __awaiter(this, void 0, void 0, function* () {
const data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
const res = yield this.post(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
});
}
putJson(requestUrl, obj, additionalHeaders = {}) {
return __awaiter(this, void 0, void 0, function* () {
const data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
const res = yield this.put(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
});
}
patchJson(requestUrl, obj, additionalHeaders = {}) {
return __awaiter(this, void 0, void 0, function* () {
const data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
const res = yield this.patch(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
});
}
/**
* Makes a raw http request.
* All other methods such as get, post, patch, and request ultimately call this.
* Prefer get, del, post and patch
*/
request(verb, requestUrl, data, headers) {
return __awaiter(this, void 0, void 0, function* () {
if (this._disposed) {
throw new Error('Client has already been disposed.');
}
const parsedUrl = new URL(requestUrl);
let info = this._prepareRequest(verb, parsedUrl, headers);
// Only perform retries on reads since writes may not be idempotent.
const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)
? this._maxRetries + 1
: 1;
let numTries = 0;
let response;
do {
response = yield this.requestRaw(info, data);
// Check if it's an authentication challenge
if (response &&
response.message &&
response.message.statusCode === HttpCodes.Unauthorized) {
let authenticationHandler;
for (const handler of this.handlers) {
if (handler.canHandleAuthentication(response)) {
authenticationHandler = handler;
break;
}
}
if (authenticationHandler) {
return authenticationHandler.handleAuthentication(this, info, data);
}
else {
// We have received an unauthorized response but have no handlers to handle it.
// Let the response return to the caller.
return response;
}
}
let redirectsRemaining = this._maxRedirects;
while (response.message.statusCode &&
HttpRedirectCodes.includes(response.message.statusCode) &&
this._allowRedirects &&
redirectsRemaining > 0) {
const redirectUrl = response.message.headers['location'];
if (!redirectUrl) {
// if there's no location to redirect to, we won't
break;
}
const parsedRedirectUrl = new URL(redirectUrl);
if (parsedUrl.protocol === 'https:' &&
parsedUrl.protocol !== parsedRedirectUrl.protocol &&
!this._allowRedirectDowngrade) {
throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
}
// we need to finish reading the response before reassigning response
// which will leak the open socket.
yield response.readBody();
// strip authorization header if redirected to a different hostname
if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
for (const header in headers) {
// header names are case insensitive
if (header.toLowerCase() === 'authorization') {
delete headers[header];
}
}
}
// let's make the request with the new redirectUrl
info = this._prepareRequest(verb, parsedRedirectUrl, headers);
response = yield this.requestRaw(info, data);
redirectsRemaining--;
}
if (!response.message.statusCode ||
!HttpResponseRetryCodes.includes(response.message.statusCode)) {
// If not a retry code, return immediately instead of retrying
return response;
}
numTries += 1;
if (numTries < maxTries) {
yield response.readBody();
yield this._performExponentialBackoff(numTries);
}
} while (numTries < maxTries);
return response;
});
}
/**
* Needs to be called if keepAlive is set to true in request options.
*/
dispose() {
if (this._agent) {
this._agent.destroy();
}
this._disposed = true;
}
/**
* Raw request.
* @param info
* @param data
*/
requestRaw(info, data) {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve, reject) => {
function callbackForResult(err, res) {
if (err) {
reject(err);
}
else if (!res) {
// If `err` is not passed, then `res` must be passed.
reject(new Error('Unknown error'));
}
else {
resolve(res);
}
}
this.requestRawWithCallback(info, data, callbackForResult);
});
});
}
/**
* Raw request with callback.
* @param info
* @param data
* @param onResult
*/
requestRawWithCallback(info, data, onResult) {
if (typeof data === 'string') {
if (!info.options.headers) {
info.options.headers = {};
}
info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
}
let callbackCalled = false;
function handleResult(err, res) {
if (!callbackCalled) {
callbackCalled = true;
onResult(err, res);
}
}
const req = info.httpModule.request(info.options, (msg) => {
const res = new HttpClientResponse(msg);
handleResult(undefined, res);
});
let socket;
req.on('socket', sock => {
socket = sock;
});
// If we ever get disconnected, we want the socket to timeout eventually
req.setTimeout(this._socketTimeout || 3 * 60000, () => {
if (socket) {
socket.end();
}
handleResult(new Error(`Request timeout: ${info.options.path}`));
});
req.on('error', function (err) {
// err has statusCode property
// res should have headers
handleResult(err);
});
if (data && typeof data === 'string') {
req.write(data, 'utf8');
}
if (data && typeof data !== 'string') {
data.on('close', function () {
req.end();
});
data.pipe(req);
}
else {
req.end();
}
}
/**
* Gets an http agent. This function is useful when you need an http agent that handles
* routing through a proxy server - depending upon the url and proxy environment variables.
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
*/
getAgent(serverUrl) {
const parsedUrl = new URL(serverUrl);
return this._getAgent(parsedUrl);
}
_prepareRequest(method, requestUrl, headers) {
const info = {};
info.parsedUrl = requestUrl;
const usingSsl = info.parsedUrl.protocol === 'https:';
info.httpModule = usingSsl ? https : http;
const defaultPort = usingSsl ? 443 : 80;
info.options = {};
info.options.host = info.parsedUrl.hostname;
info.options.port = info.parsedUrl.port
? parseInt(info.parsedUrl.port)
: defaultPort;
info.options.path =
(info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
info.options.method = method;
info.options.headers = this._mergeHeaders(headers);
if (this.userAgent != null) {
info.options.headers['user-agent'] = this.userAgent;
}
info.options.agent = this._getAgent(info.parsedUrl);
// gives handlers an opportunity to participate
if (this.handlers) {
for (const handler of this.handlers) {
handler.prepareRequest(info.options);
}
}
return info;
}
_mergeHeaders(headers) {
if (this.requestOptions && this.requestOptions.headers) {
return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));
}
return lowercaseKeys(headers || {});
}
_getExistingOrDefaultHeader(additionalHeaders, header, _default) {
let clientHeader;
if (this.requestOptions && this.requestOptions.headers) {
clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
}
return additionalHeaders[header] || clientHeader || _default;
}
_getAgent(parsedUrl) {
let agent;
const proxyUrl = pm.getProxyUrl(parsedUrl);
const useProxy = proxyUrl && proxyUrl.hostname;
if (this._keepAlive && useProxy) {
agent = this._proxyAgent;
}
if (this._keepAlive && !useProxy) {
agent = this._agent;
}
// if agent is already assigned use that agent.
if (agent) {
return agent;
}
const usingSsl = parsedUrl.protocol === 'https:';
let maxSockets = 100;
if (this.requestOptions) {
maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
}
// This is `useProxy` again, but we need to check `proxyUrl` directly for TypeScript's flow analysis.
if (proxyUrl && proxyUrl.hostname) {
const agentOptions = {
maxSockets,
keepAlive: this._keepAlive,
proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {
proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
})), { host: proxyUrl.hostname, port: proxyUrl.port })
};
let tunnelAgent;
const overHttps = proxyUrl.protocol === 'https:';
if (usingSsl) {
tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
}
else {
tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
}
agent = tunnelAgent(agentOptions);
this._proxyAgent = agent;
}
// if reusing agent across request and tunneling agent isn't assigned create a new agent
if (this._keepAlive && !agent) {
const options = { keepAlive: this._keepAlive, maxSockets };
agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
this._agent = agent;
}
// if not using private agent and tunnel agent isn't setup then use global agent
if (!agent) {
agent = usingSsl ? https.globalAgent : http.globalAgent;
}
if (usingSsl && this._ignoreSslError) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
// we have to cast it to any and change it directly
agent.options = Object.assign(agent.options || {}, {
rejectUnauthorized: false
});
}
return agent;
}
_performExponentialBackoff(retryNumber) {
return __awaiter(this, void 0, void 0, function* () {
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
return new Promise(resolve => setTimeout(() => resolve(), ms));
});
}
_processResponse(res, options) {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
const statusCode = res.message.statusCode || 0;
const response = {
statusCode,
result: null,
headers: {}
};
// not found leads to null obj returned
if (statusCode === HttpCodes.NotFound) {
resolve(response);
}
// get the result from the body
function dateTimeDeserializer(key, value) {
if (typeof value === 'string') {
const a = new Date(value);
if (!isNaN(a.valueOf())) {
return a;
}
}
return value;
}
let obj;
let contents;
try {
contents = yield res.readBody();
if (contents && contents.length > 0) {
if (options && options.deserializeDates) {
obj = JSON.parse(contents, dateTimeDeserializer);
}
else {
obj = JSON.parse(contents);
}
response.result = obj;
}
response.headers = res.message.headers;
}
catch (err) {
// Invalid resource (contents not json); leaving result obj null
}
// note that 3xx redirects are handled by the http layer.
if (statusCode > 299) {
let msg;
// if exception/error in body, attempt to get better error
if (obj && obj.message) {
msg = obj.message;
}
else if (contents && contents.length > 0) {
// it may be the case that the exception is in the body message as string
msg = contents;
}
else {
msg = `Failed request: (${statusCode})`;
}
const err = new HttpClientError(msg, statusCode);
err.result = response.result;
reject(err);
}
else {
resolve(response);
}
}));
});
}
}
exports.HttpClient = HttpClient;
const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
//# sourceMappingURL=index.js.map
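A minimal usage sketch of the vendored client above, not taken from this diff (the user agent string and endpoint are placeholders): construct an HttpClient, use the JSON helpers, and call dispose() when keepAlive is enabled.

const { HttpClient } = require('@actions/http-client');

async function example() {
    // keepAlive reuses one agent across requests; dispose() must then be called.
    const client = new HttpClient('example-user-agent', [], { allowRetries: true, maxRetries: 2, keepAlive: true });
    try {
        // getJson resolves to { statusCode, result, headers }; a 404 resolves with result === null,
        // while other 4xx/5xx responses reject with an HttpClientError.
        const res = await client.getJson('https://api.github.com/meta');
        console.log(res.statusCode, res.result);
    } finally {
        client.dispose();
    }
}

example();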
1
node_modules/@actions/http-client/lib/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
44
node_modules/@actions/http-client/lib/interfaces.d.ts
generated
vendored
Normal file
@@ -0,0 +1,44 @@
/// <reference types="node" />
import * as http from 'http';
import * as https from 'https';
import { HttpClientResponse } from './index';
export interface HttpClient {
options(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
get(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
del(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
post(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
patch(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
put(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream, headers: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
requestRaw(info: RequestInfo, data: string | NodeJS.ReadableStream): Promise<HttpClientResponse>;
requestRawWithCallback(info: RequestInfo, data: string | NodeJS.ReadableStream, onResult: (err?: Error, res?: HttpClientResponse) => void): void;
}
export interface RequestHandler {
prepareRequest(options: http.RequestOptions): void;
canHandleAuthentication(response: HttpClientResponse): boolean;
handleAuthentication(httpClient: HttpClient, requestInfo: RequestInfo, data: string | NodeJS.ReadableStream | null): Promise<HttpClientResponse>;
}
export interface RequestInfo {
options: http.RequestOptions;
parsedUrl: URL;
httpModule: typeof http | typeof https;
}
export interface RequestOptions {
headers?: http.OutgoingHttpHeaders;
socketTimeout?: number;
ignoreSslError?: boolean;
allowRedirects?: boolean;
allowRedirectDowngrade?: boolean;
maxRedirects?: number;
maxSockets?: number;
keepAlive?: boolean;
deserializeDates?: boolean;
allowRetries?: boolean;
maxRetries?: number;
}
export interface TypedResponse<T> {
statusCode: number;
result: T | null;
headers: http.IncomingHttpHeaders;
}
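The RequestHandler interface above is the extension point used by the auth handlers that ship with @actions/http-client. A hedged sketch of a handler that only attaches a bearer token (the class name and token are placeholders; it declines 401 challenges, so unauthorized responses are returned to the caller):

class ExampleBearerHandler {
    constructor(token) {
        this.token = token;
    }
    // Invoked by HttpClient for every outgoing request before it is sent.
    prepareRequest(options) {
        options.headers = options.headers || {};
        options.headers['Authorization'] = `Bearer ${this.token}`;
    }
    // Returning false means a 401 response is passed back to the caller unchanged.
    canHandleAuthentication() {
        return false;
    }
    async handleAuthentication() {
        throw new Error('This example handler does not perform reactive authentication.');
    }
}

// const client = new HttpClient('example-user-agent', [new ExampleBearerHandler('token')]);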
Some files were not shown because too many files have changed in this diff