Mirror of https://github.com/github/codeql-action.git (synced 2025-12-10 17:54:36 +08:00)

Compare commits: codeql-bun ... codeql-bun (147 commits)
147 commits compared. The original table's Author and Date columns were not captured in this mirror view; only the SHA1 column survives:

cbabe47a0b, f8a48f464d, f6f23f8671, c2a7379048, cd783c8a29, 300c8b6dcb, faa9ba7363, d2a0fc83dc, 71112ab35d, e677af3fd0,
848e5140d4, e7fe6da378, 2159631658, 9de1702400, efded22908, 5602bd50bf, 2f4be8e34b, 9763bdd6ec, 00d4d60204, e5d84de18b,
ea1acc573a, 79ea6d6a7c, 3e50d096f8, cca1cfdacf, cdea582765, 3e59dee9e2, 249c7ffce1, 254816c2d2, 6d62c245ec, 5e87034b3b,
621e0794ac, d6499fad61, 04671efa1d, e1f05902cd, f9e96fa857, 14a5537e13, d3eb4974a3, 39216d10d3, 265a7db16a, f623d4cec3,
eacec3646a, e0e2abc1a5, 716b5980cd, 1d83f2a0bc, ce77f88627, a777b51ef7, 88fbabe21d, eeb215b041, 5f53256358, 25a5103778,
0782554948, 705f634a1d, b7b7607959, 7bcc6564d4, b661ef1697, 6ad00fd084, fccdee04ba, e694ca6192, 67d11b5928, 924a64d2e0,
45dc27d3c1, cbed0358c6, a8cf6f42c2, eebe7c46f1, dc32d5448f, fac22de4f9, 0a1efd7f45, 043e3deaeb, 0dbcb55617, 00c59b98ce,
7069ada3ed, dd1f9a96d8, 546b30f35a, d1dde03d7a, f44219c94b, bdaac951f7, a82f53a364, f721f011bf, c82e09aa41, 460d053698,
3bf14e85d8, 13a9d6c442, dd65833ab6, c2d9e4b48f, e095058bfa, 2c99f99c4a, bcd7e6896f, 0b242db78f, c897659213, 8b902e1723,
26567f6a49, dbf7ac4b37, 077f7b2532, a392055010, 0aea878963, bca71988d3, 02e1cdcd36, 4860ed1ad4, 3e36cddb07, b9bd459b70,
215c4f5ff5, 4eef7ef32c, e0b9b9a248, 823bb21bbb, 49fc4c9b40, 21a786fda0, 316ad9d919, a627e9fa50, 160021fe53, 3f2269bf58,
2ecc17d74f, 9b506fed7c, 2803f4a792, 720bf9d157, bbf0a22e84, d7b5c618a4, 37a4db94ad, 6a98a4b500, ea8a175a94, f360da772a,
ea169430d2, 375c14729e, 0442e71a2a, 3832953584, 3ce10aec2e, c4b0d49ea3, 5581e08a65, df5cf240b0, ae2843216b, 5156a89668,
8f0825e9c0, 9a44540e25, ff3272d4e1, 56c7489b94, 3ba4184b13, bc31f604d3, 4293754ed2, 70b730eb7d, 2905689d8a, 1d123b770b,
9661171991, e04751618e, e891551dd4, bd48dc5be5, a53b8d0ed1, 22747bcb77, 531c6ba7c8
.github/prepare-test/action.yml (vendored): 1 changed line

@@ -28,7 +28,6 @@ runs:
echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$VERSION-manual/codeql-bundle.tar.gz"
elif [[ ${{ inputs.version }} == *"stable"* ]]; then
export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
echo "Hello $VERSION"
echo "::set-output name=tools-url::https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz"
elif [[ ${{ inputs.version }} == "latest" ]]; then
echo "::set-output name=tools-url::latest"
.github/workflows/__debug-artifacts.yml (generated, vendored, new file): 77 lines

@@ -0,0 +1,77 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Debug artifact upload
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- v1
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
debug-artifacts:
strategy:
matrix:
version:
- stable-20201028
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os: [ubuntu-latest, macos-latest]
name: Debug artifact upload
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
debug: true
debug-artifact-name: my-debug-artifacts
debug-database-name: my-db
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
id: analysis
- uses: actions/download-artifact@v2
with:
name: my-debug-artifacts-${{ matrix.os }}-${{ matrix.version }}
- shell: bash
run: |
LANGUAGES="cpp csharp go java javascript python"
for language in $LANGUAGES; do
echo "Checking $language"
if [[ ! -f "$language.sarif" ]] ; then
echo "Missing a SARIF file for $language"
exit 1
fi
if [[ ! -f "my-db-$language.zip" ]] ; then
echo "Missing a database bundle for $language"
exit 1
fi
if [[ ! -d "$language/log" ]] ; then
echo "Missing logs for $language"
exit 1
fi
done
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
.github/workflows/__extractor-ram-threads.yml (generated, vendored, new file): 63 lines

@@ -0,0 +1,63 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Extractor ram and threads options test
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- v1
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
extractor-ram-threads:
strategy:
matrix:
version: [latest]
os: [ubuntu-latest]
name: Extractor ram and threads options test
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
languages: java
ram: 230
threads: 1
- name: Assert Results
shell: bash
run: |
if [ "${CODEQL_RAM}" != "230" ]; then
echo "CODEQL_RAM is '${CODEQL_RAM}' instead of 230"
exit 1
fi
if [ "${CODEQL_EXTRACTOR_JAVA_RAM}" != "230" ]; then
echo "CODEQL_EXTRACTOR_JAVA_RAM is '${CODEQL_EXTRACTOR_JAVA_RAM}' instead of 230"
exit 1
fi
if [ "${CODEQL_THREADS}" != "1" ]; then
echo "CODEQL_THREADS is '${CODEQL_THREADS}' instead of 1"
exit 1
fi
if [ "${CODEQL_EXTRACTOR_JAVA_THREADS}" != "1" ]; then
echo "CODEQL_EXTRACTOR_JAVA_THREADS is '${CODEQL_EXTRACTOR_JAVA_THREADS}' instead of 1"
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
.github/workflows/__unset-environment.yml (generated, vendored, new file): 88 lines

@@ -0,0 +1,88 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Test unsetting environment variables
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- v1
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
unset-environment:
strategy:
matrix:
version:
- stable-20201028
- stable-20210319
- stable-20210809
- cached
- latest
- nightly-latest
os: [ubuntu-latest]
name: Test unsetting environment variables
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v2
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
db-location: ${{ runner.temp }}/customDbLocation
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
shell: bash
run: env -i PATH="$PATH" HOME="$HOME" ./build.sh
- uses: ./../action/analyze
id: analysis
env:
TEST_MODE: true
- shell: bash
run: |
CPP_DB=${{ fromJson(steps.analysis.outputs.db-locations).cpp }}
if [[ ! -d $CPP_DB ]] || [[ ! $CPP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for CPP, or created it in the wrong location."
exit 1
fi
CSHARP_DB=${{ fromJson(steps.analysis.outputs.db-locations).csharp }}
if [[ ! -d $CSHARP_DB ]] || [[ ! $CSHARP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for C Sharp, or created it in the wrong location."
exit 1
fi
GO_DB=${{ fromJson(steps.analysis.outputs.db-locations).go }}
if [[ ! -d $GO_DB ]] || [[ ! $GO_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Go, or created it in the wrong location."
exit 1
fi
JAVA_DB=${{ fromJson(steps.analysis.outputs.db-locations).java }}
if [[ ! -d $JAVA_DB ]] || [[ ! $JAVA_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Java, or created it in the wrong location."
exit 1
fi
JAVASCRIPT_DB=${{ fromJson(steps.analysis.outputs.db-locations).javascript }}
if [[ ! -d $JAVASCRIPT_DB ]] || [[ ! $JAVASCRIPT_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Javascript, or created it in the wrong location."
exit 1
fi
PYTHON_DB=${{ fromJson(steps.analysis.outputs.db-locations).python }}
if [[ ! -d $PYTHON_DB ]] || [[ ! $PYTHON_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Python, or created it in the wrong location."
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
.github/workflows/pr-checks.yml (vendored): 39 changed lines

@@ -393,3 +393,42 @@ jobs:
# Deliberately don't use TEST_MODE here. This is specifically testing
# the compatibility with the API.
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}

runner-extractor-ram-threads-options:
name: Runner ubuntu extractor RAM and threads options
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v2

- name: Build runner
run: |
cd runner
npm install
npm run build-runner

- name: Run init
run: |
runner/dist/codeql-runner-linux init --ram=230 --threads=1 --repository $GITHUB_REPOSITORY --languages java --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}

- name: Assert Results
shell: bash
run: |
. ./codeql-runner/codeql-env.sh
if [ "${CODEQL_RAM}" != "230" ]; then
echo "CODEQL_RAM is '${CODEQL_RAM}' instead of 230"
exit 1
fi
if [ "${CODEQL_EXTRACTOR_JAVA_RAM}" != "230" ]; then
echo "CODEQL_EXTRACTOR_JAVA_RAM is '${CODEQL_EXTRACTOR_JAVA_RAM}' instead of 230"
exit 1
fi
if [ "${CODEQL_THREADS}" != "1" ]; then
echo "CODEQL_THREADS is '${CODEQL_THREADS}' instead of 1"
exit 1
fi
if [ "${CODEQL_EXTRACTOR_JAVA_THREADS}" != "1" ]; then
echo "CODEQL_EXTRACTOR_JAVA_THREADS is '${CODEQL_EXTRACTOR_JAVA_THREADS}' instead of 1"
exit 1
fi
.github/workflows/update-release-branch.yml (vendored): 2 changed lines

@@ -1,7 +1,5 @@
name: Update release branch
on:
schedule:
- cron: 0 9 * * 1
repository_dispatch:
# Example of how to trigger this:
# curl -H "Authorization: Bearer <token>" -X POST https://api.github.com/repos/github/codeql-action/dispatches -d '{"event_type":"update-release-branch"}'
CHANGELOG.md: 31 changed lines

@@ -4,6 +4,37 @@

No user facing changes.

## 1.0.27 - 11 Jan 2022

- The `analyze` and `upload-sarif` actions will now wait up to 2 minutes for processing to complete after they have uploaded the results so they can report any processing errors that occurred. This behavior can be disabled by setting the `wait-for-processing` action input to `"false"`.

## 1.0.26 - 10 Dec 2021

- Update default CodeQL bundle version to 2.7.3. [#842](https://github.com/github/codeql-action/pull/842)

## 1.0.25 - 06 Dec 2021

No user facing changes.

## 1.0.24 - 23 Nov 2021

- Update default CodeQL bundle version to 2.7.2. [#827](https://github.com/github/codeql-action/pull/827)

## 1.0.23 - 16 Nov 2021

- The `upload-sarif` action now allows multiple uploads in a single job, as long as they have different categories. [#801](https://github.com/github/codeql-action/pull/801)
- Update default CodeQL bundle version to 2.7.1. [#816](https://github.com/github/codeql-action/pull/816)

## 1.0.22 - 04 Nov 2021

- The `init` step of the Action now supports `ram` and `threads` inputs to limit resource use of CodeQL extractors. These inputs also serve as defaults to the subsequent `analyze` step, which finalizes the database and executes queries. [#738](https://github.com/github/codeql-action/pull/738)
- When used with CodeQL 2.7.1 or above, the Action now includes custom query help in the analysis results uploaded to GitHub code scanning, if available. To add help text for a custom query, create a Markdown file next to the `.ql` file containing the query, using the same base name but the file extension `.md`. [#804](https://github.com/github/codeql-action/pull/804)

## 1.0.21 - 28 Oct 2021

- Update default CodeQL bundle version to 2.7.0. [#795](https://github.com/github/codeql-action/pull/795)

## 1.0.20 - 25 Oct 2021

No user facing changes.
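The 1.0.27 and 1.0.22 entries above describe new workflow-facing inputs. A minimal usage sketch, assuming a standard code scanning workflow on the v1 tag; the language and numeric values below are illustrative placeholders, not part of this diff:

```yaml
steps:
  - uses: actions/checkout@v2
  - uses: github/codeql-action/init@v1
    with:
      languages: javascript  # placeholder language
      ram: 4096              # 1.0.22: limit extractor memory (MB); also the default for the analyze step
      threads: 2             # 1.0.22: limit extractor threads; also the default for the analyze step
  - uses: github/codeql-action/analyze@v1
    with:
      wait-for-processing: "false"  # 1.0.27: skip the post-upload processing wait
```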
@@ -1,6 +1,6 @@
name: 'CodeQL: Finish'
description: 'Finalize CodeQL database'
author: 'GitHub'
name: "CodeQL: Finish"
description: "Finalize CodeQL database"
author: "GitHub"
inputs:
check_name:
description: The name of the check run to add text to.
@@ -8,9 +8,9 @@ inputs:
output:
description: The path of the directory in which to save the SARIF results
required: false
default: '../results'
default: "../results"
upload:
description: Upload the SARIF file
description: Upload the SARIF file to Code Scanning
required: false
default: "true"
cleanup-level:
@@ -18,7 +18,12 @@ inputs:
required: false
default: "brutal"
ram:
description: Override the amount of memory in MB to be used by CodeQL. By default, almost all the memory of the machine is used.
description: >-
The amount of memory in MB that can be used by CodeQL for database finalization and query execution.
By default, this action will use the same amount of memory as previously set in the "init" action.
If the "init" action also does not have an explicit "ram" input, this action will use most of the
memory available in the system (which for GitHub-hosted runners is 6GB for Linux, 5.5GB for Windows,
and 13GB for macOS).
required: false
add-snippets:
description: Specify whether or not to add code snippets to the output sarif file.
@@ -29,7 +34,12 @@ inputs:
required: false
default: "false"
threads:
description: The number of threads to be used by CodeQL.
description: >-
The number of threads that can be used by CodeQL for database finalization and query execution.
By default, this action will use the same number of threads as previously set in the "init" action.
If the "init" action also does not have an explicit "threads" input, this action will use all the
hardware threads available in the system (which for GitHub-hosted runners is 2 for Linux and Windows
and 3 for macOS).
required: false
checkout_path:
description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
@@ -42,6 +52,10 @@ inputs:
description: Whether to upload the resulting CodeQL database
required: false
default: "true"
wait-for-processing:
description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
required: true
default: "true"
token:
default: ${{ github.token }}
matrix:
@@ -50,5 +64,5 @@ outputs:
db-locations:
description: A map from language to absolute path for each database created by CodeQL.
runs:
using: 'node12'
main: '../lib/analyze-action.js'
using: "node12"
main: "../lib/analyze-action.js"
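The expanded `ram` and `threads` descriptions above state that the analyze step falls back to whatever the init step configured. A hedged sketch of overriding them explicitly at analysis time; the numbers are illustrative, not taken from this diff:

```yaml
- uses: github/codeql-action/analyze@v1
  with:
    # Explicit inputs take precedence over the CODEQL_RAM / CODEQL_THREADS
    # environment variables that the init step passes down.
    ram: 5500
    threads: 2
```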
@@ -41,6 +41,34 @@ inputs:
source-root:
description: Path of the root source code directory, relative to $GITHUB_WORKSPACE.
required: false
ram:
description: >-
The amount of memory in MB that can be used by CodeQL extractors.
By default, CodeQL extractors will use most of the memory available in the system
(which for GitHub-hosted runners is 6GB for Linux, 5.5GB for Windows, and 13GB for macOS).
This input also sets the amount of memory that can later be used by the "analyze" action.
required: false
threads:
description: >-
The number of threads that can be used by CodeQL extractors.
By default, CodeQL extractors will use all the hardware threads available in the system
(which for GitHub-hosted runners is 2 for Linux and Windows and 3 for macOS).
This input also sets the number of threads that can later be used by the "analyze" action.
required: false
debug:
description: Enable debugging mode. This will result in more output being produced which may be useful when debugging certain issues.
required: false
default: 'false'
debug-artifact-name:
description: >-
The name of the artifact to store debugging information in.
This is only used when debug mode is enabled.
required: false
debug-database-name:
description: >-
The name of the database uploaded to the debugging artifact.
This is only used when debug mode is enabled.
required: false
outputs:
codeql-path:
description: The path of the CodeQL binary used for analysis
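These init inputs are exercised by the generated test workflows earlier in this diff. A minimal sketch of setting them in a user workflow; the artifact and database names mirror the debug-artifacts test above and are arbitrary examples:

```yaml
- uses: github/codeql-action/init@v1
  with:
    languages: java                          # placeholder
    ram: 2048                                # MB available to CodeQL extractors (illustrative)
    threads: 2                               # extractor threads (illustrative)
    debug: true                              # enable debug mode
    debug-artifact-name: my-debug-artifacts  # only used when debug mode is enabled
    debug-database-name: my-db               # name given to the uploaded debug database
```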
lib/actions-util.js (generated): 7 changed lines

@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
@@ -98,6 +98,7 @@ const getCommitOid = async function (ref = "HEAD") {
}
catch (e) {
core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
core.info(e.stack || "NO STACK");
return (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
}
};
@@ -574,4 +575,8 @@ async function isAnalyzingDefaultBranch() {
return currentRef === defaultBranch;
}
exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
function sanitizeArifactName(name) {
return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
}
exports.sanitizeArifactName = sanitizeArifactName;
//# sourceMappingURL=actions-util.js.map
File diff suppressed because one or more lines are too long
lib/actions-util.test.js (generated): 6 changed lines

@@ -440,4 +440,10 @@ on: ["push"]
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
});
});
(0, ava_1.default)("sanitizeArifactName", (t) => {
t.deepEqual(actionsutil.sanitizeArifactName("hello-world_"), "hello-world_");
t.deepEqual(actionsutil.sanitizeArifactName("hello`world`"), "helloworld");
t.deepEqual(actionsutil.sanitizeArifactName("hello===123"), "hello123");
t.deepEqual(actionsutil.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"), "manyinvalid");
});
//# sourceMappingURL=actions-util.test.js.map
File diff suppressed because one or more lines are too long
lib/analysis-paths.test.js (generated): 9 changed lines

@@ -42,6 +42,9 @@ const util = __importStar(require("./util"));
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -63,6 +66,9 @@ const util = __importStar(require("./util"));
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
@@ -85,6 +91,9 @@ const util = __importStar(require("./util"));
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tempDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;SACV,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;SACV,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;SACV,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;YACpD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;YACxE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,kBAAkB,CAAC;YACrD,KAAK,EAAE,EAAE;YACT,SAAS,EAAE,KAAK;YAChB,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;YACnD,iBAAiB,EAAE,IAAI,CAAC,2BAA2B;SACpD,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
lib/analyze-action-env.test.js (generated, new file): 77 lines

@@ -0,0 +1,77 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze = __importStar(require("./analyze"));
const configUtils = __importStar(require("./config-utils"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default);
// This test needs to be in its own file so that ava would run it in its own
// nodejs process. The code being tested is in analyze-action.ts, which runs
// immediately on load. So the file needs to be loaded during part of the test,
// and that can happen only once per nodejs process. If multiple such tests are
// in the same test file, ava would run them in the same nodejs process, and all
// but the first test would fail.
(0, ava_1.default)("analyze action with RAM & threads from environment variables", async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = "fake-server-url";
process.env["GITHUB_REPOSITORY"] = "fake/repository";
sinon
.stub(actionsUtil, "createStatusReportBase")
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
sinon.stub(configUtils, "getConfig").resolves({
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
languages: [],
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("token").returns("fake-token");
requiredInputStub.withArgs("upload-database").returns("false");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
// When there are no action inputs for RAM and threads, the action uses
// environment variables (passed down from the init action) to set RAM and
// threads usage.
process.env["CODEQL_THREADS"] = "-1";
process.env["CODEQL_RAM"] = "4992";
const runFinalizeStub = sinon.stub(analyze, "runFinalize");
const runQueriesStub = sinon.stub(analyze, "runQueries");
const analyzeAction = require("./analyze-action");
// When analyze-action.ts loads, it runs an async function from the top
// level but does not wait for it to finish. To ensure that calls to
// runFinalize and runQueries are correctly captured by spies, we explicitly
// wait for the action promise to complete before starting verification.
await analyzeAction.runPromise;
t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=4992");
t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=4992");
});
});
//# sourceMappingURL=analyze-action-env.test.js.map
lib/analyze-action-env.test.js.map (new file): 1 line

@@ -0,0 +1 @@
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YAClD,SAAS,EAAE,EAAE;SACmB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
lib/analyze-action-input.test.js (generated, new file): 77 lines

@@ -0,0 +1,77 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze = __importStar(require("./analyze"));
const configUtils = __importStar(require("./config-utils"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default);
// This test needs to be in its own file so that ava would run it in its own
// nodejs process. The code being tested is in analyze-action.ts, which runs
// immediately on load. So the file needs to be loaded during part of the test,
// and that can happen only once per nodejs process. If multiple such tests are
// in the same test file, ava would run them in the same nodejs process, and all
// but the first test would fail.
(0, ava_1.default)("analyze action with RAM & threads from action inputs", async (t) => {
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = "fake-server-url";
process.env["GITHUB_REPOSITORY"] = "fake/repository";
sinon
.stub(actionsUtil, "createStatusReportBase")
.resolves({});
sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
sinon.stub(configUtils, "getConfig").resolves({
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
languages: [],
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("token").returns("fake-token");
requiredInputStub.withArgs("upload-database").returns("false");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("cleanup-level").returns("none");
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
process.env["CODEQL_THREADS"] = "1";
process.env["CODEQL_RAM"] = "4992";
// Action inputs have precedence over environment variables.
optionalInputStub.withArgs("threads").returns("-1");
optionalInputStub.withArgs("ram").returns("3012");
const runFinalizeStub = sinon.stub(analyze, "runFinalize");
const runQueriesStub = sinon.stub(analyze, "runQueries");
const analyzeAction = require("./analyze-action");
// When analyze-action.ts loads, it runs an async function from the top
// level but does not wait for it to finish. To ensure that calls to
// runFinalize and runQueries are correctly captured by spies, we explicitly
// wait for the action promise to complete before starting verification.
await analyzeAction.runPromise;
t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=3012");
t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=3012");
});
});
//# sourceMappingURL=analyze-action-input.test.js.map
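The two generated tests above encode the precedence rule for resource limits: explicit `ram`/`threads` inputs on the analyze step win over the `CODEQL_RAM`/`CODEQL_THREADS` environment variables passed down from the init step. Expressed as a hedged workflow sketch (the numeric values mirror the tests and are illustrative only):

```yaml
- uses: github/codeql-action/init@v1
  with:
    languages: java  # placeholder
    ram: 4992        # passed down to later steps via CODEQL_RAM
    threads: 1       # passed down to later steps via CODEQL_THREADS
- uses: github/codeql-action/analyze@v1
  with:
    ram: 3012        # explicit input overrides CODEQL_RAM
    threads: -1      # explicit input overrides CODEQL_THREADS
```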
lib/analyze-action-input.test.js.map (new file): 1 line

@@ -0,0 +1 @@
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,iBAAiB,CAAC;QACrD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YAClD,SAAS,EAAE,EAAE;SACmB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
lib/analyze-action.js (generated): 94 changed lines

@@ -19,18 +19,22 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.runPromise = exports.sendStatusReport = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const artifact = __importStar(require("@actions/artifact"));
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const database_upload_1 = require("./database-upload");
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
const pkg = require("../package.json");
async function sendStatusReport(startedAt, stats, error) {
@@ -44,9 +48,10 @@ async function sendStatusReport(startedAt, stats, error) {
};
await actionsUtil.sendStatusReport(statusReport);
}
exports.sendStatusReport = sendStatusReport;
async function run() {
const startedAt = new Date();
let uploadStats = undefined;
let uploadResult = undefined;
let runStats = undefined;
let config = undefined;
util.initializeEnvironment(util.Mode.actions, pkg.version);
@@ -65,11 +70,39 @@ async function run() {
url: util.getRequiredEnvParam("GITHUB_SERVER_URL"),
};
const outputDir = actionsUtil.getRequiredInput("output");
const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger);
const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram"));
const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger);
const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"]);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
const featureFlags = new feature_flags_1.GitHubFeatureFlags(config.gitHubVersion, apiDetails, repositoryNwo, logger);
// We currently perform an API request in both the `init` and `analyze` Actions to determine
// what feature flags are enabled. At the time of writing, this redundant API call is acceptable
// to us, but if we wanted to avoid it, we could do so by serializing the feature flags as part
// of the config file.
void featureFlags.preloadFeatureFlags();
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
if (config.debugMode) {
// Upload the SARIF files as an Actions artifact for debugging
await uploadDebugArtifacts(config.languages.map((lang) => path.resolve(outputDir, `${lang}.sarif`)), outputDir, config.debugArtifactName);
}
}
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
if (config.debugMode) {
// Upload the logs as an Actions artifact for debugging
const toUpload = [];
for (const language of config.languages) {
toUpload.push(...listFolder(path.resolve(util.getCodeQLDatabasePath(config, language), "log")));
}
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Multilanguage tracing: there are additional logs in the root of the cluster
toUpload.push(...listFolder(path.resolve(config.dbLocation, "log")));
}
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
if (!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
await uploadDebugArtifacts([path.resolve(config.tempDir, "compound-build-tracer.log")], config.tempDir, config.debugArtifactName);
}
}
if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
await (0, analyze_1.runCleanup)(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);
@@ -80,13 +113,17 @@ async function run() {
}
core.setOutput("db-locations", dbLocations);
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
uploadStats = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
uploadResult = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
}
else {
logger.info("Not uploading results");
}
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger);
// Possibly upload the database bundles for remote queries
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, featureFlags, apiDetails, logger);
if (uploadResult !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
}
}
catch (origError) {
const error = origError instanceof Error ? origError : new Error(String(origError));
@@ -102,6 +139,19 @@ async function run() {
return;
}
finally {
if (config !== undefined && config.debugMode) {
try {
// Upload the database bundles as an Actions artifact for debugging
const toUpload = [];
for (const language of config.languages) {
toUpload.push(await (0, util_1.bundleDb)(config, language, await (0, codeql_1.getCodeQL)(config.codeQLCmd), `${config.debugDatabaseName}-${language}`));
}
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
}
catch (error) {
console.log(`Failed to upload database debug bundles: ${error}`);
}
}
if (core.isDebug() && config !== undefined) {
core.info("Debug mode is on. Printing CodeQL debug logs...");
for (const language of config.languages) {
@@ -124,8 +174,11 @@ async function run() {
}
}
}
if (runStats && uploadStats) {
await sendStatusReport(startedAt, { ...runStats, ...uploadStats });
if (runStats && uploadResult) {
await sendStatusReport(startedAt, {
...runStats,
...uploadResult.statusReport,
});
}
else if (runStats) {
await sendStatusReport(startedAt, { ...runStats });
@@ -134,9 +187,32 @@ async function run() {
await sendStatusReport(startedAt, undefined);
}
}
async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
let suffix = "";
const matrix = actionsUtil.getRequiredInput("matrix");
if (matrix !== undefined && matrix !== "null") {
for (const entry of Object.entries(JSON.parse(matrix)).sort())
suffix += `-${entry[1]}`;
}
await artifact.create().uploadArtifact(actionsUtil.sanitizeArifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
}
function listFolder(dir) {
const entries = fs.readdirSync(dir, { withFileTypes: true });
const files = [];
for (const entry of entries) {
if (entry.isFile()) {
files.push(path.resolve(dir, entry.name));
}
else if (entry.isDirectory()) {
files.push(...listFolder(path.resolve(dir, entry.name)));
}
}
return files;
}
exports.runPromise = run();
async function runWrapper() {
try {
await run();
await exports.runPromise;
}
catch (error) {
core.setFailed(`analyze action failed: ${error}`);
File diff suppressed because one or more lines are too long
lib/analyze.test.js (generated): 3 changed lines

@@ -125,6 +125,9 @@ const util = __importStar(require("./util"));
},
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs,
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
recursive: true,
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
{ "maximumVersion": "3.3", "minimumVersion": "3.0" }
{ "maximumVersion": "3.4", "minimumVersion": "3.0" }
41
lib/codeql.js
generated
41
lib/codeql.js
generated
@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getExtraOptions = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
|
||||
exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CommandInvocationError = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
@@ -37,6 +37,7 @@ const languages_1 = require("./languages");
|
||||
const toolcache = __importStar(require("./toolcache"));
|
||||
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
||||
const util = __importStar(require("./util"));
|
||||
const util_1 = require("./util");
|
||||
class CommandInvocationError extends Error {
|
||||
constructor(cmd, args, exitCode, error) {
|
||||
super(`Failure invoking ${cmd} with arguments ${args}.\n
|
||||
@@ -72,22 +73,18 @@ const CODEQL_VERSION_METRICS = "2.5.5";
|
||||
const CODEQL_VERSION_GROUP_RULES = "2.5.5";
|
||||
const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
|
||||
exports.CODEQL_VERSION_COUNTS_LINES = "2.6.2";
|
||||
const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
|
||||
exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
|
||||
/**
|
||||
* Version above which we use the CLI's indirect build tracing and
|
||||
* multi-language tracing features.
|
||||
* This variable controls using the new style of tracing from the CodeQL
|
||||
* CLI. In particular, with versions above this we will use both indirect
|
||||
* tracing, and multi-language tracing together with database clusters.
|
||||
*
|
||||
* There are currently three blockers on the CLI's side to enabling this:
|
||||
* (1) The logs directory should be created for a DB cluster, as some
|
||||
* autobuilders expect it to be present.
|
||||
* (2) The SEMMLE_PRELOAD_libtrace{32,64}? env variables need to be set.
|
||||
* (3) The .environment and .win32env files need to be created next to
|
||||
* the DB spec.
|
||||
*
|
||||
* Once _all_ of these are fixed, we can enable this by setting the
|
||||
* version flag below to the earliest version of the CLI that resolved
|
||||
* the above issues.
|
||||
* Note that there were bugs in both of these features that were fixed in
|
||||
* release 2.7.0 of the CodeQL CLI, therefore this flag is only enabled for
|
||||
* versions above that.
|
||||
*/
|
||||
exports.CODEQL_VERSION_NEW_TRACING = "99.99.99";
|
||||
exports.CODEQL_VERSION_NEW_TRACING = "2.7.0";
|
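The hunk above flips CODEQL_VERSION_NEW_TRACING from the sentinel "99.99.99" (which no released CLI can satisfy) to "2.7.0", enabling indirect build tracing and multi-language tracing for CLI 2.7.0 and later. Elsewhere in these files the gate is applied via util.codeQlVersionAbove(...); its implementation is not shown in this diff, so the sketch below only illustrates the assumed shape of such a check, using the semver package that config-utils.js already imports.

// Sketch of a version gate (assumed semantics: actual CLI version >= required version).
const semver = require("semver");

const CODEQL_VERSION_NEW_TRACING = "2.7.0";

function codeQlVersionAbove(actualVersion, requiredVersion) {
  return semver.gte(actualVersion, requiredVersion);
}

// Example: 2.7.1 uses the new tracing, 2.6.3 does not.
console.log(codeQlVersionAbove("2.7.1", CODEQL_VERSION_NEW_TRACING)); // true
console.log(codeQlVersionAbove("2.6.3", CODEQL_VERSION_NEW_TRACING)); // false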
||||
function getCodeQLBundleName() {
|
||||
let platform;
|
||||
if (process.platform === "win32") {
|
||||
@@ -220,7 +217,7 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant
|
||||
// specified explicitly (in which case we always honor it).
|
||||
if (!codeqlFolder && !codeqlURL && !forceLatest) {
|
||||
const codeqlVersions = toolcache.findAllVersions("CodeQL", toolCacheDir, logger);
|
||||
if (codeqlVersions.length === 1) {
|
||||
if (codeqlVersions.length === 1 && (0, util_1.isGoodVersion)(codeqlVersions[0])) {
|
||||
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0], toolCacheDir, logger);
|
||||
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
|
||||
logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`);
|
||||
@@ -358,6 +355,15 @@ function getCachedCodeQL() {
|
||||
return cachedCodeQL;
|
||||
}
|
||||
exports.getCachedCodeQL = getCachedCodeQL;
|
||||
/**
|
||||
* Get a real, newly created CodeQL instance for testing. The instance refers to
|
||||
* a non-existent placeholder codeql command, so tests that use this function
|
||||
* should also stub the toolrunner.ToolRunner constructor.
|
||||
*/
|
||||
async function getCodeQLForTesting() {
|
||||
return getCodeQLForCmd("codeql-for-testing", false);
|
||||
}
|
||||
exports.getCodeQLForTesting = getCodeQLForTesting;
|
||||
async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
let cachedVersion = undefined;
|
||||
const codeql = {
|
||||
@@ -568,6 +574,8 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
codeqlArgs.push("--print-metrics-summary");
|
||||
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_GROUP_RULES))
|
||||
codeqlArgs.push("--sarif-group-rules-by-pack");
|
||||
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_CUSTOM_QUERY_HELP))
|
||||
codeqlArgs.push("--sarif-add-query-help");
|
||||
if (automationDetailsId !== undefined &&
|
||||
(await util.codeQlVersionAbove(this, CODEQL_VERSION_SARIF_GROUP))) {
|
||||
codeqlArgs.push("--sarif-category", automationDetailsId);
|
||||
@@ -630,12 +638,13 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
];
|
||||
await runTool(cmd, codeqlArgs);
|
||||
},
|
||||
async databaseBundle(databasePath, outputFilePath) {
|
||||
async databaseBundle(databasePath, outputFilePath, databaseName) {
|
||||
const args = [
|
||||
"database",
|
||||
"bundle",
|
||||
databasePath,
|
||||
`--output=${outputFilePath}`,
|
||||
`--name=${databaseName}`,
|
||||
];
|
||||
await new toolrunner.ToolRunner(cmd, args).exec();
|
||||
},
|
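As the updated argument list shows, databaseBundle() now passes a --name flag alongside --output. The snippet below simply spells out the command that these arguments assemble; the paths and the database name are hypothetical placeholders for illustration only.

// Hypothetical example of the argument list built by databaseBundle().
const cmd = "codeql";
const args = [
  "database",
  "bundle",
  "/tmp/codeql_databases/javascript",                  // hypothetical databasePath
  "--output=/tmp/codeql_databases/javascript.zip",     // hypothetical outputFilePath
  "--name=javascript-database",                        // hypothetical databaseName
];
console.log([cmd, ...args].join(" "));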
||||
|
||||
File diff suppressed because one or more lines are too long
23
lib/codeql.test.js
generated
@@ -23,9 +23,11 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const path = __importStar(require("path"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const toolcache = __importStar(require("@actions/tool-cache"));
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const nock_1 = __importDefault(require("nock"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const codeql = __importStar(require("./codeql"));
|
||||
const defaults = __importStar(require("./defaults.json"));
|
||||
const logging_1 = require("./logging");
|
||||
@@ -217,4 +219,25 @@ ava_1.default.beforeEach(() => {
|
||||
const repoEnv = codeql.getCodeQLActionRepository(logger);
|
||||
t.deepEqual(repoEnv, "xxx/yyy");
|
||||
});
|
||||
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-query-help for 2.7.0", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
|
||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be absent, but it is present");
|
||||
});
|
||||
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "");
|
||||
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent");
|
||||
});
|
||||
function stubToolRunnerConstructor() {
|
||||
const runnerObjectStub = sinon.createStubInstance(toolrunner.ToolRunner);
|
||||
runnerObjectStub.exec.resolves(0);
|
||||
const runnerConstructorStub = sinon.stub(toolrunner, "ToolRunner");
|
||||
runnerConstructorStub.returns(runnerObjectStub);
|
||||
return runnerConstructorStub;
|
||||
}
|
||||
//# sourceMappingURL=codeql.test.js.map
|
||||
File diff suppressed because one or more lines are too long
56
lib/config-utils.js
generated
@@ -25,8 +25,11 @@ const path = __importStar(require("path"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const semver = __importStar(require("semver"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const codeql_1 = require("./codeql");
|
||||
const externalQueries = __importStar(require("./external-queries"));
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const util_1 = require("./util");
|
||||
// Property names from the user-supplied config file.
|
||||
const NAME_PROPERTY = "name";
|
||||
const DISABLE_DEFAULT_QUERIES_PROPERTY = "disable-default-queries";
|
||||
@@ -116,11 +119,26 @@ const builtinSuites = ["security-extended", "security-and-quality"];
|
||||
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
|
||||
* Throws an error if suiteName is not a valid builtin suite.
|
||||
*/
|
||||
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, suiteName, configFile) {
|
||||
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureFlags, configFile) {
|
||||
const found = builtinSuites.find((suite) => suite === suiteName);
|
||||
if (!found) {
|
||||
throw new Error(getQueryUsesInvalid(configFile, suiteName));
|
||||
}
|
||||
// If we're running the JavaScript security-extended analysis (or a superset of it) and the repo
|
||||
// is opted into the ML-powered queries beta, then add the ML-powered query pack so that we run
|
||||
// the ML-powered queries.
|
||||
if (languages.includes("javascript") &&
|
||||
(found === "security-extended" || found === "security-and-quality") &&
|
||||
(await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled)) &&
|
||||
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES))) {
|
||||
if (!packs.javascript) {
|
||||
packs.javascript = [];
|
||||
}
|
||||
packs.javascript.push({
|
||||
packName: "codeql/javascript-experimental-atm-queries",
|
||||
version: "~0.0.2",
|
||||
});
|
||||
}
|
||||
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
|
||||
await runResolveQueries(codeQL, resultMap, suites, undefined);
|
||||
}
|
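Per the comment and conditions above, the ML-powered query pack is only injected when all four gates pass: JavaScript is among the analysed languages, the requested suite is security-extended or security-and-quality, the MlPoweredQueriesEnabled feature flag is on, and the CLI is at least CODEQL_VERSION_ML_POWERED_QUERIES (2.7.5). When that happens, the resulting packs object contains the entry shown below; this mirrors what the config-utils tests later in this diff assert and is included here only as a worked illustration.

// Shape of `packs` after the ML-powered query pack has been added for JavaScript.
const packs = {
  javascript: [
    {
      packName: "codeql/javascript-experimental-atm-queries",
      version: "~0.0.2",
    },
  ],
};
console.log(JSON.stringify(packs, null, 2));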
||||
@@ -180,7 +198,7 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
|
||||
* local paths starting with './', or references to remote repos, or
|
||||
* a finite set of hardcoded terms for builtin suites.
|
||||
*/
|
||||
async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, workspacePath, apiDetails, logger, configFile) {
|
||||
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, featureFlags, logger, configFile) {
|
||||
queryUses = queryUses.trim();
|
||||
if (queryUses === "") {
|
||||
throw new Error(getQueryUsesInvalid(configFile));
|
||||
@@ -192,7 +210,7 @@ async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir,
|
||||
}
|
||||
// Check for one of the builtin suites
|
||||
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
|
||||
await addBuiltinSuiteQueries(languages, codeQL, resultMap, queryUses, configFile);
|
||||
await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureFlags, configFile);
|
||||
return;
|
||||
}
|
||||
// Otherwise, must be a reference to another repo
|
||||
@@ -404,12 +422,12 @@ async function getLanguages(codeQL, languagesInput, repository, apiDetails, logg
|
||||
}
|
||||
return parsedLanguages;
|
||||
}
|
||||
async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, workspacePath, apiDetails, logger) {
|
||||
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureFlags, logger) {
|
||||
queriesInput = queriesInput.trim();
|
||||
// "+" means "don't override config file" - see shouldAddConfigFileQueries
|
||||
queriesInput = queriesInput.replace(/^\+/, "");
|
||||
for (const query of queriesInput.split(",")) {
|
||||
await parseQueryUses(languages, codeQL, resultMap, query, tempDir, workspacePath, apiDetails, logger);
|
||||
await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
}
|
||||
}
|
||||
// Returns true if either no queries were provided in the workflow.
|
||||
@@ -425,7 +443,7 @@ function shouldAddConfigFileQueries(queriesInput) {
|
||||
/**
|
||||
* Get the default config for when the user has not supplied one.
|
||||
*/
|
||||
async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
|
||||
async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
var _a;
|
||||
const languages = await getLanguages(codeQL, languagesInput, repository, apiDetails, logger);
|
||||
const queries = {};
|
||||
@@ -436,10 +454,10 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
|
||||
};
|
||||
}
|
||||
await addDefaultQueries(codeQL, languages, queries);
|
||||
if (queriesInput) {
|
||||
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, workspacePath, apiDetails, logger);
|
||||
}
|
||||
const packs = (_a = parsePacksFromInput(packsInput, languages)) !== null && _a !== void 0 ? _a : {};
|
||||
if (queriesInput) {
|
||||
await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
}
|
||||
return {
|
||||
languages,
|
||||
queries,
|
||||
@@ -452,13 +470,16 @@ async function getDefaultConfig(languagesInput, queriesInput, packsInput, dbLoca
|
||||
codeQLCmd: codeQL.getPath(),
|
||||
gitHubVersion,
|
||||
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
|
||||
debugMode,
|
||||
debugArtifactName,
|
||||
debugDatabaseName,
|
||||
};
|
||||
}
|
||||
exports.getDefaultConfig = getDefaultConfig;
|
||||
/**
|
||||
* Load the config from the given file.
|
||||
*/
|
||||
async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
|
||||
async function loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
var _a;
|
||||
let parsedYAML;
|
||||
if (isLocal(configFile)) {
|
||||
@@ -499,12 +520,13 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
|
||||
if (!disableDefaultQueries) {
|
||||
await addDefaultQueries(codeQL, languages, queries);
|
||||
}
|
||||
const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile);
|
||||
// If queries were provided using `with` in the action configuration,
|
||||
// they should take precedence over the queries in the config file
|
||||
// unless they're prefixed with "+", in which case they supplement those
|
||||
// in the config file.
|
||||
if (queriesInput) {
|
||||
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, workspacePath, apiDetails, logger);
|
||||
await addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
}
|
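The comment above describes the precedence rule for the queries input: by default it replaces the queries listed in the config file, while a leading "+" makes it additive. shouldAddConfigFileQueries itself is truncated in this diff, so the snippet below is only a simplified sketch of that rule, under the stated assumption that an empty input or a leading "+" keeps the config-file queries.

// Simplified sketch (assumed behaviour, not the actual shouldAddConfigFileQueries source).
function shouldAddConfigFileQueriesSketch(queriesInput) {
  return !queriesInput || queriesInput.trim().startsWith("+");
}

console.log(shouldAddConfigFileQueriesSketch(undefined));            // true  -> config file queries are used
console.log(shouldAddConfigFileQueriesSketch("+security-extended")); // true  -> workflow queries supplement them
console.log(shouldAddConfigFileQueriesSketch("security-extended"));  // false -> workflow queries replace them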
||||
if (shouldAddConfigFileQueries(queriesInput) &&
|
||||
QUERIES_PROPERTY in parsedYAML) {
|
||||
@@ -517,7 +539,7 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
|
||||
typeof query[QUERIES_USES_PROPERTY] !== "string") {
|
||||
throw new Error(getQueryUsesInvalid(configFile));
|
||||
}
|
||||
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, logger, configFile);
|
||||
await parseQueryUses(languages, codeQL, queries, packs, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, featureFlags, logger, configFile);
|
||||
}
|
||||
}
|
||||
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
|
||||
@@ -542,7 +564,6 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
|
||||
paths.push(validateAndSanitisePath(includePath, PATHS_PROPERTY, configFile, logger));
|
||||
}
|
||||
}
|
||||
const packs = parsePacks((_a = parsedYAML[PACKS_PROPERTY]) !== null && _a !== void 0 ? _a : {}, packsInput, languages, configFile);
|
||||
return {
|
||||
languages,
|
||||
queries,
|
||||
@@ -555,6 +576,9 @@ async function loadConfig(languagesInput, queriesInput, packsInput, configFile,
|
||||
codeQLCmd: codeQL.getPath(),
|
||||
gitHubVersion,
|
||||
dbLocation: dbLocationOrDefault(dbLocation, tempDir),
|
||||
debugMode,
|
||||
debugArtifactName,
|
||||
debugDatabaseName,
|
||||
};
|
||||
}
|
||||
/**
|
||||
@@ -680,16 +704,16 @@ function dbLocationOrDefault(dbLocation, tempDir) {
|
||||
* This will parse the config from the user input if present, or generate
|
||||
* a default config. The parsed config is then stored to a known location.
|
||||
*/
|
||||
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
|
||||
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
var _a, _b, _c;
|
||||
let config;
|
||||
// If no config file was provided create an empty one
|
||||
if (!configFile) {
|
||||
logger.debug("No configuration file was provided");
|
||||
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
|
||||
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
|
||||
}
|
||||
else {
|
||||
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
|
||||
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
|
||||
}
|
||||
// The list of queries should not be empty for any language. If it is then
|
||||
// it is a user configuration error.
|
||||
|
||||
File diff suppressed because one or more lines are too long
96
lib/config-utils.test.js
generated
@@ -31,6 +31,7 @@ const sinon = __importStar(require("sinon"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const codeql_1 = require("./codeql");
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
@@ -88,8 +89,8 @@ function mockListLanguages(languages) {
|
||||
};
|
||||
},
|
||||
});
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
|
||||
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger));
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
|
||||
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger));
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("loading config saves config", async (t) => {
|
||||
@@ -111,7 +112,7 @@ function mockListLanguages(languages) {
|
||||
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
|
||||
// Sanity check that getConfig returns undefined before we have called initConfig
|
||||
t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined);
|
||||
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
|
||||
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
|
||||
// The saved config file should now exist
|
||||
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
|
||||
// And that same newly-initialised config should now be returned by getConfig
|
||||
@@ -122,7 +123,7 @@ function mockListLanguages(languages) {
|
||||
(0, ava_1.default)("load input outside of workspace", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, "../input", undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -135,7 +136,7 @@ function mockListLanguages(languages) {
|
||||
// no filename given, just a repo
|
||||
const configFile = "octo-org/codeql-config@main";
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -149,7 +150,7 @@ function mockListLanguages(languages) {
|
||||
const configFile = "input";
|
||||
t.false(fs.existsSync(path.join(tmpDir, configFile)));
|
||||
try {
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -214,10 +215,13 @@ function mockListLanguages(languages) {
|
||||
gitHubVersion,
|
||||
dbLocation: path.resolve(tmpDir, "codeql_databases"),
|
||||
packs: {},
|
||||
debugMode: false,
|
||||
debugArtifactName: "my-artifact",
|
||||
debugDatabaseName: "my-db",
|
||||
};
|
||||
const languages = "javascript";
|
||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
||||
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "my-artifact", "my-db", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Should exactly equal the object we constructed earlier
|
||||
t.deepEqual(actualConfig, expectedConfig);
|
||||
});
|
||||
@@ -253,7 +257,7 @@ function mockListLanguages(languages) {
|
||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
||||
const languages = "javascript";
|
||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolve queries was called correctly
|
||||
t.deepEqual(resolveQueriesArgs.length, 1);
|
||||
t.deepEqual(resolveQueriesArgs[0].queries, [
|
||||
@@ -296,7 +300,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for the default queries
|
||||
// and once for `./foo` from the config file.
|
||||
@@ -329,7 +333,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for the default queries and once for `./override`,
|
||||
// but won't be called for './foo' from the config file.
|
||||
@@ -361,7 +365,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for `./workflow-query`,
|
||||
// but won't be called for the default one since that was disabled
|
||||
@@ -387,7 +391,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly:
|
||||
// It'll be called once for the default queries,
|
||||
// and then once for each of the two queries from the workflow
|
||||
@@ -426,7 +430,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, configFilePath, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for the default queries,
|
||||
// once for each of additional1 and additional2,
|
||||
@@ -465,7 +469,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
try {
|
||||
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.fail("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -508,7 +512,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
|
||||
const configFile = "octo-org/codeql-config/config.yaml@main";
|
||||
const languages = "javascript";
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.assert(spyGetContents.called);
|
||||
});
|
||||
});
|
||||
@@ -518,7 +522,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
mockGetContents(dummyResponse);
|
||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -534,7 +538,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
mockGetContents(dummyResponse);
|
||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, repoReference, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -551,7 +555,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -563,7 +567,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
const languages = "rubbish,english";
|
||||
try {
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -591,7 +595,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
const configFile = path.join(tmpDir, "codeql-config.yaml");
|
||||
fs.writeFileSync(configFile, inputFileContents);
|
||||
const languages = "javascript";
|
||||
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
const { packs } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.deepEqual(packs, {
|
||||
[languages_1.Language.javascript]: [
|
||||
{
|
||||
@@ -630,7 +634,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
fs.writeFileSync(configFile, inputFileContents);
|
||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
||||
const languages = "javascript,python,cpp";
|
||||
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example" }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.deepEqual(packs, {
|
||||
[languages_1.Language.javascript]: [
|
||||
{
|
||||
@@ -683,7 +687,7 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
|
||||
const inputFile = path.join(tmpDir, configFile);
|
||||
fs.writeFileSync(inputFile, inputFileContents, "utf8");
|
||||
try {
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, configFile, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -866,6 +870,50 @@ parseInputAndConfigErrorMacro.title = (providedTitle) => `Parse Packs input and
|
||||
(0, ava_1.default)("input with two languages", parseInputAndConfigErrorMacro, {}, "c/d", [languages_1.Language.cpp, languages_1.Language.csharp], /multi-language analysis/);
|
||||
(0, ava_1.default)("input with + only", parseInputAndConfigErrorMacro, {}, " + ", [languages_1.Language.cpp], /remove the '\+'/);
|
||||
(0, ava_1.default)("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], /"xxx" is not a valid pack/);
|
||||
// errors
|
||||
// input w invalid pack name
|
||||
async function mlPoweredQueriesMacro(t, codeQLVersion, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries) {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
async getVersion() {
|
||||
return codeQLVersion;
|
||||
},
|
||||
async resolveQueries() {
|
||||
return {
|
||||
byLanguage: {
|
||||
javascript: { "fake-query.ql": {} },
|
||||
},
|
||||
noDeclaredLanguage: {},
|
||||
multipleDeclaredLanguages: {},
|
||||
};
|
||||
},
|
||||
});
|
||||
const { packs } = await configUtils.initConfig("javascript", queriesInput, undefined, undefined, undefined, false, "", "", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)(isMlPoweredQueriesFlagEnabled
|
||||
? [feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled]
|
||||
: []), (0, logging_1.getRunnerLogger)(true));
|
||||
if (shouldRunMlPoweredQueries) {
|
||||
t.deepEqual(packs, {
|
||||
[languages_1.Language.javascript]: [
|
||||
{
|
||||
packName: "codeql/javascript-experimental-atm-queries",
|
||||
version: "~0.0.2",
|
||||
},
|
||||
],
|
||||
});
|
||||
}
|
||||
else {
|
||||
t.deepEqual(packs, {});
|
||||
}
|
||||
});
|
||||
}
|
||||
mlPoweredQueriesMacro.title = (_providedTitle, codeQLVersion, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries) => {
|
||||
const queriesInputDescription = queriesInput
|
||||
? `'queries: ${queriesInput}'`
|
||||
: "default config";
|
||||
return `ML-powered queries ${shouldRunMlPoweredQueries ? "are" : "aren't"} loaded for ${queriesInputDescription} using CLI v${codeQLVersion} when feature flag is ${isMlPoweredQueriesFlagEnabled ? "enabled" : "disabled"}`;
|
||||
};
|
||||
// macro, isMlPoweredQueriesFlagEnabled, queriesInput, shouldRunMlPoweredQueries
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, "security-extended", false);
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, "security-extended", false);
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, false);
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "security-extended", true);
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, "security-and-quality", true);
|
||||
//# sourceMappingURL=config-utils.test.js.map
|
||||
File diff suppressed because one or more lines are too long
60
lib/database-upload.js
generated
@@ -24,8 +24,10 @@ const fs = __importStar(require("fs"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const api_client_1 = require("./api-client");
|
||||
const codeql_1 = require("./codeql");
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const util = __importStar(require("./util"));
|
||||
async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
|
||||
const util_1 = require("./util");
|
||||
async function uploadDatabases(repositoryNwo, config, featureFlags, apiDetails, logger) {
|
||||
if (actionsUtil.getRequiredInput("upload-database") !== "true") {
|
||||
logger.debug("Database upload disabled in workflow. Skipping upload.");
|
||||
return;
|
||||
@@ -40,38 +42,40 @@ async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
|
||||
logger.debug("Not analyzing default branch. Skipping upload.");
|
||||
return;
|
||||
}
|
||||
const client = (0, api_client_1.getApiClient)(apiDetails);
|
||||
try {
|
||||
await client.request("GET /repos/:owner/:repo/code-scanning/codeql/databases", {
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
if (util.isHTTPError(e) && e.status === 404) {
|
||||
logger.debug("Repository is not opted in to database uploads. Skipping upload.");
|
||||
}
|
||||
else {
|
||||
console.log(e);
|
||||
logger.info(`Skipping database upload due to unknown error: ${e}`);
|
||||
}
|
||||
if (!(await featureFlags.getValue(feature_flags_1.FeatureFlag.DatabaseUploadsEnabled))) {
|
||||
logger.debug("Repository is not opted in to database uploads. Skipping upload.");
|
||||
return;
|
||||
}
|
||||
const client = (0, api_client_1.getApiClient)(apiDetails);
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
const useUploadDomain = await featureFlags.getValue(feature_flags_1.FeatureFlag.UploadsDomainEnabled);
|
||||
for (const language of config.languages) {
|
||||
// Bundle the database up into a single zip file
|
||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||
const databaseBundlePath = `${databasePath}.zip`;
|
||||
await codeql.databaseBundle(databasePath, databaseBundlePath);
|
||||
// Upload the database bundle
|
||||
const payload = fs.readFileSync(databaseBundlePath);
|
||||
// Upload the database bundle.
|
||||
// Although we are uploading arbitrary file contents to the API, it's worth
|
||||
// noting that it's the API's job to validate that the contents is acceptable.
|
||||
// This API method is available to anyone with write access to the repo.
|
||||
const payload = fs.readFileSync(await (0, util_1.bundleDb)(config, language, codeql, language));
|
||||
try {
|
||||
await client.request(`PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language`, {
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
language,
|
||||
data: payload,
|
||||
});
|
||||
if (useUploadDomain) {
|
||||
await client.request(`POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name`, {
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
language,
|
||||
name: `${language}-database`,
|
||||
data: payload,
|
||||
headers: {
|
||||
authorization: `token ${apiDetails.auth}`,
|
||||
},
|
||||
});
|
||||
}
|
||||
else {
|
||||
await client.request(`PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language`, {
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
language,
|
||||
data: payload,
|
||||
});
|
||||
}
|
||||
logger.debug(`Successfully uploaded database for ${language}`);
|
||||
}
|
||||
catch (e) {
|
||||
|
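The updated uploadDatabases() chooses between two endpoints based on the UploadsDomainEnabled feature flag: a POST to the uploads.github.com route (with an explicit name query parameter and a token header) when the flag is on, and the original PUT route otherwise. The helper below restates just that route selection as a self-contained sketch; it returns the request templates used above and is for illustration only.

// Sketch: pick the database-upload request template based on the feature flag value.
function databaseUploadRoute(useUploadDomain) {
  return useUploadDomain
    ? "POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name"
    : "PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language";
}

console.log(databaseUploadRoute(true));
console.log(databaseUploadRoute(false));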
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAIrC,6CAA+B;AAExB,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE;QAC9D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;KACR;IAED,iDAAiD;IACjD,IAAI,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;QAC3D,MAAM,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACjE,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE;QACnD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;KACR;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,UAAU,CAAC,CAAC;IACxC,IAAI;QACF,MAAM,MAAM,CAAC,OAAO,CAClB,wDAAwD,EACxD;YACE,KAAK,EAAE,aAAa,CAAC,KAAK;YAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;SACzB,CACF,CAAC;KACH;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;YAC3C,MAAM,CAAC,KAAK,CACV,kEAAkE,CACnE,CAAC;SACH;aAAM;YACL,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,MAAM,CAAC,IAAI,CAAC,kDAAkD,CAAC,EAAE,CAAC,CAAC;SACpE;QACD,OAAO;KACR;IAED,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,gDAAgD;QAChD,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;QAClE,MAAM,kBAAkB,GAAG,GAAG,YAAY,MAAM,CAAC;QACjD,MAAM,MAAM,CAAC,cAAc,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC;QAE9D,6BAA6B;QAC7B,MAAM,OAAO,GAAG,EAAE,CAAC,YAAY,CAAC,kBAAkB,CAAC,CAAC;QACpD,IAAI;YACF,MAAM,MAAM,CAAC,OAAO,CAClB,kEAAkE,EAClE;gBACE,KAAK,EAAE,aAAa,CAAC,KAAK;gBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;gBACxB,QAAQ;gBACR,IAAI,EAAE,OAAO;aACd,CACF,CAAC;YACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;SACnE;KACF;AACH,CAAC;AAtED,0CAsEC"}
|
||||
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAErC,mDAA4D;AAG5D,6CAA+B;AAC/B,iCAAkC;AAE3B,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,YAA0B,EAC1B,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE;QAC9D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;KACR;IAED,iDAAiD;IACjD,IAAI,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;QAC3D,MAAM,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACjE,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE;QACnD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;KACR;IAED,IAAI,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,2BAAW,CAAC,sBAAsB,CAAC,CAAC,EAAE;QACtE,MAAM,CAAC,KAAK,CACV,kEAAkE,CACnE,CAAC;QACF,OAAO;KACR;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,UAAU,CAAC,CAAC;IACxC,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,MAAM,eAAe,GAAG,MAAM,YAAY,CAAC,QAAQ,CACjD,2BAAW,CAAC,oBAAoB,CACjC,CAAC;IAEF,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,8BAA8B;QAC9B,2EAA2E;QAC3E,8EAA8E;QAC9E,wEAAwE;QACxE,MAAM,OAAO,GAAG,EAAE,CAAC,YAAY,CAC7B,MAAM,IAAA,eAAQ,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CACnD,CAAC;QACF,IAAI;YACF,IAAI,eAAe,EAAE;gBACnB,MAAM,MAAM,CAAC,OAAO,CAClB,wGAAwG,EACxG;oBACE,KAAK,EAAE,aAAa,CAAC,KAAK;oBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;oBACxB,QAAQ;oBACR,IAAI,EAAE,GAAG,QAAQ,WAAW;oBAC5B,IAAI,EAAE,OAAO;oBACb,OAAO,EAAE;wBACP,aAAa,EAAE,SAAS,UAAU,CAAC,IAAI,EAAE;qBAC1C;iBACF,CACF,CAAC;aACH;iBAAM;gBACL,MAAM,MAAM,CAAC,OAAO,CAClB,kEAAkE,EAClE;oBACE,KAAK,EAAE,aAAa,CAAC,KAAK;oBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;oBACxB,QAAQ;oBACR,IAAI,EAAE,OAAO;iBACd,CACF,CAAC;aACH;YACD,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAAE;YACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACf,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;SACnE;KACF;AACH,CAAC;AA9ED,0CA8EC"}
|
||||
120
lib/database-upload.test.js
generated
@@ -30,6 +30,7 @@ const actionsUtil = __importStar(require("./actions-util"));
|
||||
const apiClient = __importStar(require("./api-client"));
|
||||
const codeql_1 = require("./codeql");
|
||||
const database_upload_1 = require("./database-upload");
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const languages_1 = require("./languages");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util_1 = require("./util");
|
||||
@@ -37,6 +38,10 @@ const util_1 = require("./util");
|
||||
ava_1.default.beforeEach(() => {
|
||||
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
|
||||
});
|
||||
const uploadToUploadsDomainFlags = (0, feature_flags_1.createFeatureFlags)([
|
||||
feature_flags_1.FeatureFlag.DatabaseUploadsEnabled,
|
||||
feature_flags_1.FeatureFlag.UploadsDomainEnabled,
|
||||
]);
|
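createFeatureFlags comes from the feature-flags module and is used throughout these tests to simulate feature-flag states. Its implementation is not part of the hunks shown here, so the snippet below is only an assumed stand-in that captures how the tests appear to use it: flags named in the list resolve to true and everything else to false; the string flag values are hypothetical placeholders.

// Assumed stand-in for createFeatureFlags (not the real implementation).
function createFeatureFlagsSketch(enabledFlags) {
  return {
    async getValue(flag) {
      return enabledFlags.includes(flag);
    },
  };
}

// Example mirroring uploadToUploadsDomainFlags above.
const flags = createFeatureFlagsSketch(["database_uploads_enabled", "uploads_domain_enabled"]);
flags.getValue("uploads_domain_enabled").then((enabled) => console.log(enabled)); // true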
||||
const testRepoName = { owner: "github", repo: "example" };
|
||||
const testApiDetails = {
|
||||
auth: "1234",
|
||||
@@ -55,50 +60,24 @@ function getTestConfig(tmpDir) {
|
||||
gitHubVersion: { type: util_1.GitHubVariant.DOTCOM },
|
||||
dbLocation: tmpDir,
|
||||
packs: {},
|
||||
debugMode: false,
|
||||
debugArtifactName: util_1.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||
debugDatabaseName: util_1.DEFAULT_DEBUG_DATABASE_NAME,
|
||||
};
|
||||
}
|
||||
function getRecordingLogger(messages) {
|
||||
return {
|
||||
debug: (message) => {
|
||||
messages.push({ type: "debug", message });
|
||||
console.debug(message);
|
||||
},
|
||||
info: (message) => {
|
||||
messages.push({ type: "info", message });
|
||||
console.info(message);
|
||||
},
|
||||
warning: (message) => {
|
||||
messages.push({ type: "warning", message });
|
||||
console.warn(message);
|
||||
},
|
||||
error: (message) => {
|
||||
messages.push({ type: "error", message });
|
||||
console.error(message);
|
||||
},
|
||||
isDebug: () => true,
|
||||
startGroup: () => undefined,
|
||||
endGroup: () => undefined,
|
||||
};
|
||||
}
|
||||
function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
|
||||
async function mockHttpRequests(featureFlags, databaseUploadStatusCode) {
|
||||
// Passing an auth token is required, so we just use a dummy value
|
||||
const client = github.getOctokit("123");
|
||||
const requestSpy = sinon.stub(client, "request");
|
||||
const optInSpy = requestSpy.withArgs("GET /repos/:owner/:repo/code-scanning/codeql/databases");
|
||||
if (optInStatusCode < 300) {
|
||||
optInSpy.resolves(undefined);
|
||||
const url = (await featureFlags.getValue(feature_flags_1.FeatureFlag.UploadsDomainEnabled))
|
||||
? "POST https://uploads.github.com/repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name"
|
||||
: "PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language";
|
||||
const databaseUploadSpy = requestSpy.withArgs(url);
|
||||
if (databaseUploadStatusCode < 300) {
|
||||
databaseUploadSpy.resolves(undefined);
|
||||
}
|
||||
else {
|
||||
optInSpy.throws(new util_1.HTTPError("some error message", optInStatusCode));
|
||||
}
|
||||
if (databaseUploadStatusCode !== undefined) {
|
||||
const databaseUploadSpy = requestSpy.withArgs("PUT /repos/:owner/:repo/code-scanning/codeql/databases/:language");
|
||||
if (databaseUploadStatusCode < 300) {
|
||||
databaseUploadSpy.resolves(undefined);
|
||||
}
|
||||
else {
|
||||
databaseUploadSpy.throws(new util_1.HTTPError("some error message", databaseUploadStatusCode));
|
||||
}
|
||||
databaseUploadSpy.throws(new util_1.HTTPError("some error message", databaseUploadStatusCode));
|
||||
}
|
||||
sinon.stub(apiClient, "getApiClient").value(() => client);
|
||||
}
|
||||
@@ -111,7 +90,7 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
|
||||
.returns("false");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), uploadToUploadsDomainFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message === "Database upload disabled in workflow. Skipping upload.") !== undefined);
|
||||
});
|
||||
@@ -127,7 +106,7 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
|
||||
const config = getTestConfig(tmpDir);
|
||||
config.gitHubVersion = { type: util_1.GitHubVariant.GHES, version: "3.0" };
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, config, (0, feature_flags_1.createFeatureFlags)([]), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message === "Not running against github.com. Skipping upload.") !== undefined);
|
||||
});
|
||||
@@ -143,7 +122,7 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
|
||||
const config = getTestConfig(tmpDir);
|
||||
config.gitHubVersion = { type: util_1.GitHubVariant.GHAE };
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, config, testApiDetails, getRecordingLogger(loggedMessages));
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, config, (0, feature_flags_1.createFeatureFlags)([]), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message === "Not running against github.com. Skipping upload.") !== undefined);
|
||||
});
|
||||
@@ -157,12 +136,12 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), uploadToUploadsDomainFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message === "Not analyzing default branch. Skipping upload.") !== undefined);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Abort database upload if opt-in request returns 404", async (t) => {
|
||||
(0, ava_1.default)("Abort database upload if feature flag is disabled", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
sinon
|
||||
@@ -170,40 +149,18 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
mockHttpRequests(404);
|
||||
(0, codeql_1.setCodeQL)({
|
||||
async databaseBundle() {
|
||||
return;
|
||||
},
|
||||
});
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.UploadsDomainEnabled]), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message ===
|
||||
"Repository is not opted in to database uploads. Skipping upload.") !== undefined);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Abort database upload if opt-in request fails with something other than 404", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
sinon
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
mockHttpRequests(500);
|
||||
(0, codeql_1.setCodeQL)({
|
||||
async databaseBundle() {
|
||||
return;
|
||||
},
|
||||
});
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "info" &&
|
||||
v.message ===
|
||||
"Skipping database upload due to unknown error: Error: some error message") !== undefined);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Don't crash if uploading a database fails", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
@@ -212,20 +169,23 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
mockHttpRequests(204, 500);
|
||||
const featureFlags = (0, feature_flags_1.createFeatureFlags)([
|
||||
feature_flags_1.FeatureFlag.DatabaseUploadsEnabled,
|
||||
]);
|
||||
await mockHttpRequests(featureFlags, 500);
|
||||
(0, codeql_1.setCodeQL)({
|
||||
async databaseBundle(_, outputFilePath) {
|
||||
fs.writeFileSync(outputFilePath, "");
|
||||
},
|
||||
});
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), featureFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "warning" &&
|
||||
v.message ===
|
||||
"Failed to upload database for javascript: Error: some error message") !== undefined);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Successfully uploading a database", async (t) => {
|
||||
(0, ava_1.default)("Successfully uploading a database to api.github.com", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
sinon
|
||||
@@ -233,14 +193,34 @@ function mockHttpRequests(optInStatusCode, databaseUploadStatusCode) {
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
mockHttpRequests(204, 201);
|
||||
await mockHttpRequests(uploadToUploadsDomainFlags, 201);
|
||||
(0, codeql_1.setCodeQL)({
|
||||
async databaseBundle(_, outputFilePath) {
|
||||
fs.writeFileSync(outputFilePath, "");
|
||||
},
|
||||
});
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, getRecordingLogger(loggedMessages));
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), uploadToUploadsDomainFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message === "Successfully uploaded database for javascript") !== undefined);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Successfully uploading a database to uploads.github.com", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
sinon
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
await mockHttpRequests(uploadToUploadsDomainFlags, 201);
|
||||
(0, codeql_1.setCodeQL)({
|
||||
async databaseBundle(_, outputFilePath) {
|
||||
fs.writeFileSync(outputFilePath, "");
|
||||
},
|
||||
});
|
||||
const loggedMessages = [];
|
||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), uploadToUploadsDomainFlags, testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message === "Successfully uploaded database for javascript") !== undefined);
|
||||
});
File diff suppressed because one or more lines are too long
@@ -1,3 +1,3 @@
{
"bundleVersion": "codeql-bundle-20211013"
"bundleVersion": "codeql-bundle-20211208"
}
91
lib/feature-flags.js
generated
Normal file
@@ -0,0 +1,91 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createFeatureFlags = exports.GitHubFeatureFlags = exports.FeatureFlag = void 0;
|
||||
const api_client_1 = require("./api-client");
|
||||
const util = __importStar(require("./util"));
|
||||
var FeatureFlag;
|
||||
(function (FeatureFlag) {
|
||||
FeatureFlag["DatabaseUploadsEnabled"] = "database_uploads_enabled";
|
||||
FeatureFlag["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
|
||||
FeatureFlag["UploadsDomainEnabled"] = "uploads_domain_enabled";
|
||||
})(FeatureFlag = exports.FeatureFlag || (exports.FeatureFlag = {}));
|
||||
class GitHubFeatureFlags {
|
||||
constructor(gitHubVersion, apiDetails, repositoryNwo, logger) {
|
||||
this.gitHubVersion = gitHubVersion;
|
||||
this.apiDetails = apiDetails;
|
||||
this.repositoryNwo = repositoryNwo;
|
||||
this.logger = logger;
|
||||
}
|
||||
async getValue(flag) {
|
||||
const response = (await this.getApiResponse())[flag];
|
||||
if (response === undefined) {
|
||||
this.logger.debug(`Feature flag '${flag}' undefined in API response, considering it disabled.`);
|
||||
return false;
|
||||
}
|
||||
return response;
|
||||
}
|
||||
async preloadFeatureFlags() {
|
||||
await this.getApiResponse();
|
||||
}
|
||||
async getApiResponse() {
|
||||
const loadApiResponse = async () => {
|
||||
// Do nothing when not running against github.com
|
||||
if (this.gitHubVersion.type !== util.GitHubVariant.DOTCOM) {
|
||||
this.logger.debug("Not running against github.com. Disabling all feature flags.");
|
||||
return {};
|
||||
}
|
||||
const client = (0, api_client_1.getApiClient)(this.apiDetails);
|
||||
try {
|
||||
const response = await client.request("GET /repos/:owner/:repo/code-scanning/codeql-action/features", {
|
||||
owner: this.repositoryNwo.owner,
|
||||
repo: this.repositoryNwo.repo,
|
||||
});
|
||||
return response.data;
|
||||
}
|
||||
catch (e) {
|
||||
// Some feature flags, such as `ml_powered_queries_enabled` affect the produced alerts.
|
||||
// Considering these feature flags disabled in the event of a transient error could
|
||||
// therefore lead to alert churn. As a result, we crash if we cannot determine the value of
|
||||
// the feature flags.
|
||||
throw new Error(`Encountered an error while trying to load feature flags: ${e}`);
|
||||
}
|
||||
};
|
||||
const apiResponse = this.cachedApiResponse || (await loadApiResponse());
|
||||
this.cachedApiResponse = apiResponse;
|
||||
return apiResponse;
|
||||
}
|
||||
}
|
||||
exports.GitHubFeatureFlags = GitHubFeatureFlags;
|
||||
/**
|
||||
* Create a feature flags instance with the specified set of enabled flags.
|
||||
*
|
||||
* This should be only used within tests.
|
||||
*/
|
||||
function createFeatureFlags(enabledFlags) {
|
||||
return {
|
||||
getValue: async (flag) => {
|
||||
return enabledFlags.includes(flag);
|
||||
},
|
||||
};
|
||||
}
|
||||
exports.createFeatureFlags = createFeatureFlags;
|
||||
//# sourceMappingURL=feature-flags.js.map
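For context only (not part of the diff): a minimal sketch of how this module is consumed, based solely on the API shown above; gitHubVersion, apiDetails, repositoryNwo and logger stand in for the values callers construct elsewhere in this changeset.
const { FeatureFlag, GitHubFeatureFlags, createFeatureFlags } = require("./feature-flags");
async function featureFlagSketch(gitHubVersion, apiDetails, repositoryNwo, logger) {
  // Production path: flags come from the code-scanning features API shown above.
  const featureFlags = new GitHubFeatureFlags(gitHubVersion, apiDetails, repositoryNwo, logger);
  if (await featureFlags.getValue(FeatureFlag.UploadsDomainEnabled)) {
    // e.g. upload databases to uploads.github.com instead of api.github.com
  }
  // Test path: a stub where only the listed flags read as enabled.
  const testFlags = createFeatureFlags([FeatureFlag.DatabaseUploadsEnabled]);
  return testFlags.getValue(FeatureFlag.DatabaseUploadsEnabled); // resolves to true
}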
1
lib/feature-flags.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"feature-flags.js","sourceRoot":"","sources":["../src/feature-flags.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,6CAA8D;AAG9D,6CAA+B;AAM/B,IAAY,WAIX;AAJD,WAAY,WAAW;IACrB,kEAAmD,CAAA;IACnD,qEAAsD,CAAA;IACtD,8DAA+C,CAAA;AACjD,CAAC,EAJW,WAAW,GAAX,mBAAW,KAAX,mBAAW,QAItB;AAUD,MAAa,kBAAkB;IAG7B,YACU,aAAiC,EACjC,UAA4B,EAC5B,aAA4B,EAC5B,MAAc;QAHd,kBAAa,GAAb,aAAa,CAAoB;QACjC,eAAU,GAAV,UAAU,CAAkB;QAC5B,kBAAa,GAAb,aAAa,CAAe;QAC5B,WAAM,GAAN,MAAM,CAAQ;IACrB,CAAC;IAEJ,KAAK,CAAC,QAAQ,CAAC,IAAiB;QAC9B,MAAM,QAAQ,GAAG,CAAC,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC;QACrD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,iBAAiB,IAAI,uDAAuD,CAC7E,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,KAAK,CAAC,mBAAmB;QACvB,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;IAC9B,CAAC;IAEO,KAAK,CAAC,cAAc;QAC1B,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE;YACjC,iDAAiD;YACjD,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;gBACzD,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,8DAA8D,CAC/D,CAAC;gBACF,OAAO,EAAE,CAAC;aACX;YACD,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAC7C,IAAI;gBACF,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,OAAO,CACnC,8DAA8D,EAC9D;oBACE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;oBAC/B,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI;iBAC9B,CACF,CAAC;gBACF,OAAO,QAAQ,CAAC,IAAI,CAAC;aACtB;YAAC,OAAO,CAAC,EAAE;gBACV,uFAAuF;gBACvF,mFAAmF;gBACnF,2FAA2F;gBAC3F,qBAAqB;gBACrB,MAAM,IAAI,KAAK,CACb,4DAA4D,CAAC,EAAE,CAChE,CAAC;aACH;QACH,CAAC,CAAC;QAEF,MAAM,WAAW,GAAG,IAAI,CAAC,iBAAiB,IAAI,CAAC,MAAM,eAAe,EAAE,CAAC,CAAC;QACxE,IAAI,CAAC,iBAAiB,GAAG,WAAW,CAAC;QACrC,OAAO,WAAW,CAAC;IACrB,CAAC;CACF;AA3DD,gDA2DC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,YAA2B;IAC5D,OAAO;QACL,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE;YACvB,OAAO,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACrC,CAAC;KACF,CAAC;AACJ,CAAC;AAND,gDAMC"}
98
lib/feature-flags.test.js
generated
Normal file
@@ -0,0 +1,98 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const logging_1 = require("./logging");
|
||||
const repository_1 = require("./repository");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util_1 = require("./util");
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
ava_1.default.beforeEach(() => {
|
||||
(0, util_1.initializeEnvironment)(util_1.Mode.actions, "1.2.3");
|
||||
});
|
||||
const testApiDetails = {
|
||||
auth: "1234",
|
||||
url: "https://github.com",
|
||||
};
|
||||
const testRepositoryNwo = (0, repository_1.parseRepositoryNwo)("github/example");
|
||||
const ALL_FEATURE_FLAGS_DISABLED_VARIANTS = [
|
||||
{
|
||||
description: "GHES",
|
||||
gitHubVersion: { type: util_1.GitHubVariant.GHES, version: "3.0.0" },
|
||||
},
|
||||
{ description: "GHAE", gitHubVersion: { type: util_1.GitHubVariant.GHAE } },
|
||||
];
|
||||
for (const variant of ALL_FEATURE_FLAGS_DISABLED_VARIANTS) {
|
||||
(0, ava_1.default)(`All feature flags are disabled if running against ${variant.description}`, async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const loggedMessages = [];
|
||||
const featureFlags = new feature_flags_1.GitHubFeatureFlags(variant.gitHubVersion, testApiDetails, testRepositoryNwo, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
for (const flag of Object.values(feature_flags_1.FeatureFlag)) {
|
||||
t.assert((await featureFlags.getValue(flag)) === false);
|
||||
}
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message ===
|
||||
"Not running against github.com. Disabling all feature flags.") !== undefined);
|
||||
});
|
||||
});
|
||||
}
|
||||
(0, ava_1.default)("Feature flags are disabled if they're not returned in API response", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const loggedMessages = [];
|
||||
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
|
||||
for (const flag of Object.values(feature_flags_1.FeatureFlag)) {
|
||||
t.assert((await featureFlags.getValue(flag)) === false);
|
||||
}
|
||||
for (const featureFlag of [
|
||||
"database_uploads_enabled",
|
||||
"ml_powered_queries_enabled",
|
||||
"uploads_domain_enabled",
|
||||
]) {
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message ===
|
||||
`Feature flag '${featureFlag}' undefined in API response, considering it disabled.`) !== undefined);
|
||||
}
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Feature flags exception is propagated if the API request errors", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, logging_1.getRunnerLogger)(true));
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(500, {});
|
||||
await t.throwsAsync(async () => featureFlags.preloadFeatureFlags(), {
|
||||
message: "Encountered an error while trying to load feature flags: Error: some error message",
|
||||
});
|
||||
});
|
||||
});
|
||||
const FEATURE_FLAGS = [
|
||||
"database_uploads_enabled",
|
||||
"ml_powered_queries_enabled",
|
||||
"uploads_domain_enabled",
|
||||
];
|
||||
for (const featureFlag of FEATURE_FLAGS) {
|
||||
(0, ava_1.default)(`Feature flag '${featureFlag}' is enabled if enabled in the API response`, async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, logging_1.getRunnerLogger)(true));
|
||||
const expectedFeatureFlags = {};
|
||||
for (const f of FEATURE_FLAGS) {
|
||||
expectedFeatureFlags[f] = false;
|
||||
}
|
||||
expectedFeatureFlags[featureFlag] = true;
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureFlags);
|
||||
const actualFeatureFlags = {
|
||||
database_uploads_enabled: await featureFlags.getValue(feature_flags_1.FeatureFlag.DatabaseUploadsEnabled),
|
||||
ml_powered_queries_enabled: await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled),
|
||||
uploads_domain_enabled: await featureFlags.getValue(feature_flags_1.FeatureFlag.UploadsDomainEnabled),
|
||||
};
|
||||
t.deepEqual(actualFeatureFlags, expectedFeatureFlags);
|
||||
});
|
||||
});
|
||||
}
|
||||
//# sourceMappingURL=feature-flags.test.js.map
1
lib/feature-flags.test.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"feature-flags.test.js","sourceRoot":"","sources":["../src/feature-flags.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAGvB,mDAAkE;AAClE,uCAA4C;AAC5C,6CAAkD;AAClD,mDAMyB;AAEzB,iCAAgF;AAEhF,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAC/C,CAAC,CAAC,CAAC;AAEH,MAAM,cAAc,GAAqB;IACvC,IAAI,EAAE,MAAM;IACZ,GAAG,EAAE,oBAAoB;CAC1B,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAA,+BAAkB,EAAC,gBAAgB,CAAC,CAAC;AAE/D,MAAM,mCAAmC,GAGpC;IACH;QACE,WAAW,EAAE,MAAM;QACnB,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE;KAC9D;IACD,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,EAAE;CACrE,CAAC;AAEF,KAAK,MAAM,OAAO,IAAI,mCAAmC,EAAE;IACzD,IAAA,aAAI,EAAC,qDAAqD,OAAO,CAAC,WAAW,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC3F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;YAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,OAAO,CAAC,aAAa,EACrB,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;YAEF,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;gBAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;aACzD;YAED,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,8DAA8D,CACnE,KAAK,SAAS,CAChB,CAAC;QACJ,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ;AAED,IAAA,aAAI,EAAC,oEAAoE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACrF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,cAAc,GAAG,EAAE,CAAC;QAC1B,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,kCAAkB,EAAC,cAAc,CAAC,CACnC,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC,2BAAW,CAAC,EAAE;YAC7C,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC;SACzD;QAED,KAAK,MAAM,WAAW,IAAI;YACxB,0BAA0B;YAC1B,4BAA4B;YAC5B,wBAAwB;SACzB,EAAE;YACD,CAAC,CAAC,MAAM,CACN,cAAc,CAAC,IAAI,CACjB,CAAC,CAAgB,EAAE,EAAE,CACnB,CAAC,CAAC,IAAI,KAAK,OAAO;gBAClB,CAAC,CAAC,OAAO;oBACP,iBAAiB,WAAW,uDAAuD,CACxF,KAAK,SAAS,CAChB,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iEAAiE,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClF,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;QAEF,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,MAAM,CAAC,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE,CAAC,YAAY,CAAC,mBAAmB,EAAE,EAAE;YAClE,OAAO,EACL,oFAAoF;SACvF,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,MAAM,aAAa,GAAG;IACpB,0BAA0B;IAC1B,4BAA4B;IAC5B,wBAAwB;CACzB,CAAC;AAEF,KAAK,MAAM,WAAW,IAAI,aAAa,EAAE;IACvC,IAAA,aAAI,EAAC,iBAAiB,WAAW,6CAA6C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;QAC1F,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;YAChC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;YAEjC,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE,EAC9B,cAAc,EACd,iBAAiB,EACjB,IAAA,yBAAe,EAAC,IAAI,CAAC,CACtB,CAAC;YAEF,MAAM,oBAAoB,GAAG,EAAE,CAAC;YAChC,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;gBAC7B,oBAAoB,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;aACjC;YACD,oBAAoB,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC;YACzC,IAAA,0CAA0B,EAAC,GAAG,EAAE,oBAAoB,CAAC,CAAC;YAEtD,MAAM,kBAAkB,GAAG;gBACzB,wBAAwB,EAAE,MAAM,YAAY,CAAC,QAAQ,CACnD,2BAAW,CAAC,sBAAsB,CACnC;gBACD,0BAA0B,EAAE,MAAM,YAAY,CAAC,QAAQ,CACrD,2BAAW,CAAC,uBAAuB,CACp
C;gBACD,sBAAsB,EAAE,MAAM,YAAY,CAAC,QAAQ,CACjD,2BAAW,CAAC,oBAAoB,CACjC;aACF,CAAC;YAEF,CAAC,CAAC,SAAS,CAAC,kBAAkB,EAAE,oBAAoB,CAAC,CAAC;QACxD,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;CACJ"}
21
lib/init-action.js
generated
@@ -23,6 +23,7 @@ const path = __importStar(require("path"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const actions_util_1 = require("./actions-util");
|
||||
const codeql_1 = require("./codeql");
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const init_1 = require("./init");
|
||||
const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
@@ -78,6 +79,9 @@ async function run() {
|
||||
};
|
||||
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
|
||||
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger, util_1.Mode.actions);
|
||||
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
|
||||
const featureFlags = new feature_flags_1.GitHubFeatureFlags(gitHubVersion, apiDetails, repositoryNwo, logger);
|
||||
void featureFlags.preloadFeatureFlags();
|
||||
try {
|
||||
const workflowErrors = await (0, actions_util_1.validateWorkflow)();
|
||||
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
|
||||
@@ -87,7 +91,11 @@ async function run() {
|
||||
codeql = initCodeQLResult.codeql;
|
||||
toolsVersion = initCodeQLResult.toolsVersion;
|
||||
await (0, util_1.enrichEnvironment)(util_1.Mode.actions, codeql);
|
||||
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), (0, actions_util_1.getTemporaryDirectory)(), (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE"), codeql, (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, logger);
|
||||
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), (0, actions_util_1.getOptionalInput)("debug") === "true", (0, actions_util_1.getOptionalInput)("debug-artifact-name") || util_1.DEFAULT_DEBUG_ARTIFACT_NAME, (0, actions_util_1.getOptionalInput)("debug-database-name") || util_1.DEFAULT_DEBUG_DATABASE_NAME, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), (0, util_1.getRequiredEnvParam)("RUNNER_TOOL_CACHE"), codeql, (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, featureFlags, logger);
|
||||
if (config.languages.some(languages_1.isTracedLanguage)) {
|
||||
// We currently do not support tracing on Windows 11 and Windows Server 2022
|
||||
(0, util_1.checkNotWindows11)();
|
||||
}
|
||||
if (config.languages.includes(languages_1.Language.python) &&
|
||||
(0, actions_util_1.getRequiredInput)("setup-python-dependencies") === "true") {
|
||||
try {
|
||||
@@ -113,9 +121,14 @@ async function run() {
|
||||
core.exportVariable("GOFLAGS", goFlags);
|
||||
core.warning("Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action.");
|
||||
}
|
||||
// Setup CODEQL_RAM flag (todo improve this https://github.com/github/dsp-code-scanning/issues/935)
|
||||
const codeqlRam = process.env["CODEQL_RAM"] || "6500";
|
||||
core.exportVariable("CODEQL_RAM", codeqlRam);
|
||||
// Limit RAM and threads for extractors. When running extractors, the CodeQL CLI obeys the
|
||||
// CODEQL_RAM and CODEQL_THREADS environment variables to decide how much RAM and how many
|
||||
// threads it would ask extractors to use. See help text for the "--ram" and "--threads"
|
||||
// options at https://codeql.github.com/docs/codeql-cli/manual/database-trace-command/
|
||||
// for details.
|
||||
core.exportVariable("CODEQL_RAM", process.env["CODEQL_RAM"] ||
|
||||
(0, util_1.getMemoryFlagValue)((0, actions_util_1.getOptionalInput)("ram")).toString());
|
||||
core.exportVariable("CODEQL_THREADS", (0, util_1.getThreadsFlagValue)((0, actions_util_1.getOptionalInput)("threads"), logger).toString());
const sourceRoot = path.resolve((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), (0, actions_util_1.getOptionalInput)("source-root") || "");
|
||||
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", undefined);
|
||||
if (tracerConfig !== undefined) {
File diff suppressed because one or more lines are too long
4
lib/init.js
generated
@@ -38,9 +38,9 @@ async function initCodeQL(codeqlURL, apiDetails, tempDir, toolCacheDir, variant,
|
||||
return { codeql, toolsVersion };
|
||||
}
|
||||
exports.initCodeQL = initCodeQL;
|
||||
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger) {
|
||||
async function initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
logger.startGroup("Load language configuration");
|
||||
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, logger);
|
||||
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, toolCacheDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
|
||||
analysisPaths.printPathFiltersWarning(config, logger);
|
||||
logger.endGroup();
|
||||
return config;
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,YAAoB,EACpB,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,OAAO,EACP,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AAlCD,gCAkCC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,YAAgC;IAEhC,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;QAChE,0BAA0B;QAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,SAAS,EAChB,UAAU,EACV,WAAW,EACX,YAAY,CACb,CAAC;KACH;SAAM;QACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;YACvC,yBAAyB;YACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;SACH;KACF;IAED,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AA9BD,0BA8BC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,IAAI,CA
AC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAtCD,8CAsCC"}
|
||||
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA2E;AAC3E,4DAA8C;AAI9C,mDAAwE;AACxE,6CAA+B;AAC/B,iCAA4C;AAErC,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,YAAoB,EACpB,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,IAAA,oBAAW,EAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,YAAY,EACZ,OAAO,EACP,MAAM,EACN,IAAI,CACL,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AArBD,gCAqBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAA8B,EAC9B,UAA8B,EAC9B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,YAA0B,EAC1B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,UAAU,EACV,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,YAAY,EACZ,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA1CD,gCA0CC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,YAAgC;IAEhC,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAErD,IAAI,MAAM,IAAA,yBAAkB,EAAC,MAAM,EAAE,mCAA0B,CAAC,EAAE;QAChE,0BAA0B;QAC1B,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,SAAS,EAChB,UAAU,EACV,WAAW,EACX,YAAY,CACb,CAAC;KACH;SAAM;QACL,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;YACvC,yBAAyB;YACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAC5C,QAAQ,EACR,UAAU,CACX,CAAC;SACH;KACF;IAED,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AA9BD,0BA8BC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OA
AO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAtCD,8CAsCC"}
51
lib/runner.js
generated
@@ -18,15 +18,20 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs = __importStar(require("fs"));
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
const commander_1 = require("commander");
|
||||
const del_1 = __importDefault(require("del"));
|
||||
const analyze_1 = require("./analyze");
|
||||
const autobuild_1 = require("./autobuild");
|
||||
const codeql_1 = require("./codeql");
|
||||
const config_utils_1 = require("./config-utils");
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const init_1 = require("./init");
|
||||
const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
@@ -54,11 +59,14 @@ function getToolsDir(userInput) {
|
||||
return toolsDir;
|
||||
}
|
||||
const codeqlEnvJsonFilename = "codeql-env.json";
|
||||
function loadTracerEnvironment(config) {
|
||||
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
|
||||
return JSON.parse(fs.readFileSync(jsonEnvFile).toString("utf-8"));
|
||||
}
|
||||
// Imports the environment from codeqlEnvJsonFilename if not already present
|
||||
function importTracerEnvironment(config) {
|
||||
if (!("ODASA_TRACER_CONFIGURATION" in process.env)) {
|
||||
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
|
||||
const env = JSON.parse(fs.readFileSync(jsonEnvFile).toString("utf-8"));
|
||||
const env = loadTracerEnvironment(config);
|
||||
for (const key of Object.keys(env)) {
|
||||
process.env[key] = env[key];
|
||||
}
|
||||
@@ -116,6 +124,12 @@ program
|
||||
.option("--debug", "Print more verbose output", false)
|
||||
.option("--trace-process-name <string>", "(Advanced, windows-only) Inject a windows tracer of this process into a process with the given process name.")
|
||||
.option("--trace-process-level <number>", "(Advanced, windows-only) Inject a windows tracer of this process into a parent process <number> levels up.")
|
||||
.option("--ram <number>", "The amount of memory in MB that can be used by CodeQL extractors. " +
|
||||
"By default, CodeQL extractors will use most of the memory available in the system. " +
|
||||
'This input also sets the amount of memory that can later be used by the "analyze" command.')
|
||||
.option("--threads <number>", "The number of threads that can be used by CodeQL extractors. " +
|
||||
"By default, CodeQL extractors will use all the hardware threads available in the system. " +
|
||||
'This input also sets the number of threads that can later be used by the "analyze" command.')
|
||||
.action(async (cmd) => {
|
||||
const logger = (0, logging_1.getRunnerLogger)(cmd.debug);
|
||||
try {
|
||||
@@ -124,7 +138,7 @@ program
|
||||
const checkoutPath = cmd.checkoutPath || process.cwd();
|
||||
// Wipe the temp dir
|
||||
logger.info(`Cleaning temp directory ${tempDir}`);
|
||||
fs.rmSync(tempDir, { recursive: true, force: true });
|
||||
await (0, del_1.default)(tempDir, { force: true });
|
||||
fs.mkdirSync(tempDir, { recursive: true });
|
||||
const auth = await (0, util_1.getGitHubAuth)(logger, cmd.githubAuth, cmd.githubAuthStdin);
|
||||
const apiDetails = {
|
||||
@@ -134,6 +148,13 @@ program
|
||||
};
|
||||
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
|
||||
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger, util_1.Mode.runner);
|
||||
// Limit RAM and threads for extractors. When running extractors, the CodeQL CLI obeys the
|
||||
// CODEQL_RAM and CODEQL_THREADS environment variables to decide how much RAM and how many
|
||||
// threads it would ask extractors to use. See help text for the "--ram" and "--threads"
|
||||
// options at https://codeql.github.com/docs/codeql-cli/manual/database-trace-command/
|
||||
// for details.
|
||||
process.env["CODEQL_RAM"] = (0, util_1.getMemoryFlagValue)(cmd.ram).toString();
|
||||
process.env["CODEQL_THREADS"] = (0, util_1.getThreadsFlagValue)(cmd.threads, logger).toString();
|
||||
let codeql;
|
||||
if (cmd.codeqlPath !== undefined) {
|
||||
codeql = await (0, codeql_1.getCodeQL)(cmd.codeqlPath);
|
||||
@@ -143,7 +164,7 @@ program
|
||||
}
|
||||
await (0, util_1.enrichEnvironment)(util_1.Mode.runner, codeql);
|
||||
const workspacePath = checkoutPath;
|
||||
const config = await (0, init_1.initConfig)(cmd.languages, cmd.queries, cmd.packs, cmd.configFile, undefined, (0, repository_1.parseRepositoryNwo)(cmd.repository), tempDir, toolsDir, codeql, workspacePath, gitHubVersion, apiDetails, logger);
|
||||
const config = await (0, init_1.initConfig)(cmd.languages, cmd.queries, cmd.packs, cmd.configFile, undefined, false, "", "", (0, repository_1.parseRepositoryNwo)(cmd.repository), tempDir, toolsDir, codeql, workspacePath, gitHubVersion, apiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
|
||||
const sourceRoot = checkoutPath;
|
||||
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, parseTraceProcessName(), parseTraceProcessLevel());
|
||||
if (tracerConfig === undefined) {
|
||||
@@ -240,10 +261,15 @@ program
|
||||
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
|
||||
.option("--no-upload", "Do not upload results after analysis.")
|
||||
.option("--output-dir <dir>", "Directory to output SARIF files to. Default is in the temp directory.")
|
||||
.option("--ram <ram>", "Amount of memory to use when running queries. Default is to use all available memory.")
|
||||
.option("--ram <ram>", "The amount of memory in MB that can be used by CodeQL for database finalization and query execution. " +
|
||||
'By default, this command will use the same amount of memory as previously set in the "init" command. ' +
|
||||
'If the "init" command also does not have an explicit "ram" flag, this command will use most of the ' +
|
||||
"memory available in the system.")
|
||||
.option("--no-add-snippets", "Specify whether to include code snippets in the sarif output.")
|
||||
.option("--threads <threads>", "Number of threads to use when running queries. " +
|
||||
"Default is to use all available cores.")
|
||||
.option("--threads <threads>", "The number of threads that can be used by CodeQL for database finalization and query execution. " +
|
||||
'By default, this command will use the same number of threads as previously set in the "init" command. ' +
|
||||
'If the "init" command also does not have an explicit "threads" flag, this command will use all the ' +
|
||||
"hardware threads available in the system.")
|
||||
.option("--temp-dir <dir>", 'Directory to use for temporary files. Default is "./codeql-runner".')
|
||||
.option("--category <category>", "String used by Code Scanning for matching the analyses.")
|
||||
.option("--debug", "Print more verbose output", false)
|
||||
@@ -262,8 +288,15 @@ program
|
||||
url: (0, util_1.parseGitHubUrl)(cmd.githubUrl),
|
||||
};
|
||||
const outputDir = cmd.outputDir || path.join(config.tempDir, "codeql-sarif");
|
||||
const threads = (0, util_1.getThreadsFlag)(cmd.threads, logger);
|
||||
const memory = (0, util_1.getMemoryFlag)(cmd.ram);
|
||||
let initEnv = {};
|
||||
try {
|
||||
initEnv = loadTracerEnvironment(config);
|
||||
}
|
||||
catch (err) {
|
||||
// The init command did not generate a tracer environment file
|
||||
}
|
||||
const threads = (0, util_1.getThreadsFlag)(cmd.threads || initEnv["CODEQL_THREADS"], logger);
|
||||
const memory = (0, util_1.getMemoryFlag)(cmd.ram || initEnv["CODEQL_RAM"]);
|
||||
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger);
|
||||
await (0, analyze_1.runQueries)(outputDir, memory, (0, util_1.getAddSnippetsFlag)(cmd.addSnippets), threads, cmd.category, config, logger);
|
||||
if (!cmd.upload) {
File diff suppressed because one or more lines are too long
49
lib/testing-utils.js
generated
@@ -19,9 +19,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.setupActionsVars = exports.setupTests = void 0;
|
||||
exports.mockFeatureFlagApiEndpoint = exports.getRecordingLogger = exports.setupActionsVars = exports.setupTests = void 0;
|
||||
const github = __importStar(require("@actions/github"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const apiClient = __importStar(require("./api-client"));
|
||||
const CodeQL = __importStar(require("./codeql"));
|
||||
const util_1 = require("./util");
|
||||
function wrapOutput(context) {
|
||||
// Function signature taken from Socket.write.
|
||||
// Note there are two overloads:
|
||||
@@ -89,4 +92,48 @@ function setupActionsVars(tempDir, toolsDir) {
|
||||
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
|
||||
}
|
||||
exports.setupActionsVars = setupActionsVars;
|
||||
function getRecordingLogger(messages) {
|
||||
return {
|
||||
debug: (message) => {
|
||||
messages.push({ type: "debug", message });
|
||||
console.debug(message);
|
||||
},
|
||||
info: (message) => {
|
||||
messages.push({ type: "info", message });
|
||||
console.info(message);
|
||||
},
|
||||
warning: (message) => {
|
||||
messages.push({ type: "warning", message });
|
||||
console.warn(message);
|
||||
},
|
||||
error: (message) => {
|
||||
messages.push({ type: "error", message });
|
||||
console.error(message);
|
||||
},
|
||||
isDebug: () => true,
|
||||
startGroup: () => undefined,
|
||||
endGroup: () => undefined,
|
||||
};
|
||||
}
|
||||
exports.getRecordingLogger = getRecordingLogger;
|
||||
/** Mock the HTTP request to the feature flags enablement API endpoint. */
|
||||
function mockFeatureFlagApiEndpoint(responseStatusCode, response) {
|
||||
// Passing an auth token is required, so we just use a dummy value
|
||||
const client = github.getOctokit("123");
|
||||
const requestSpy = sinon.stub(client, "request");
|
||||
const optInSpy = requestSpy.withArgs("GET /repos/:owner/:repo/code-scanning/codeql-action/features");
|
||||
if (responseStatusCode < 300) {
|
||||
optInSpy.resolves({
|
||||
status: responseStatusCode,
|
||||
data: response,
|
||||
headers: {},
|
||||
url: "GET /repos/:owner/:repo/code-scanning/codeql-action/features",
|
||||
});
|
||||
}
|
||||
else {
|
||||
optInSpy.throws(new util_1.HTTPError("some error message", responseStatusCode));
|
||||
}
|
||||
sinon.stub(apiClient, "getApiClient").value(() => client);
|
||||
}
|
||||
exports.mockFeatureFlagApiEndpoint = mockFeatureFlagApiEndpoint;
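A usage sketch (not part of the diff), mirroring how feature-flags.test.js above combines these helpers; the enabled flag and repository name are arbitrary examples.
// Inside an ava test body:
const loggedMessages = [];
mockFeatureFlagApiEndpoint(200, { ml_powered_queries_enabled: true });
const featureFlags = new GitHubFeatureFlags({ type: GitHubVariant.DOTCOM }, { auth: "1234", url: "https://github.com" }, parseRepositoryNwo("github/example"), getRecordingLogger(loggedMessages));
t.assert((await featureFlags.getValue(FeatureFlag.MlPoweredQueriesEnabled)) === true);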
//# sourceMappingURL=testing-utils.js.map
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,6CAA+B;AAE/B,iDAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;AAC9C,CAAC;AAHD,4CAGC"}
|
||||
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,wDAA0C;AAE1C,6CAA+B;AAE/B,wDAA0C;AAC1C,iDAAmC;AAEnC,iCAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAwB;IACjD,MAAM,SAAS,GAAG,IAAkC,CAAC;IAErD,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAvCD,gCAuCC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;AAC9C,CAAC;AAHD,4CAGC;AAOD,SAAgB,kBAAkB,CAAC,QAAyB;IAC1D,OAAO;QACL,KAAK,EAAE,CAAC,OAAe,EAAE,EAAE;YACzB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,IAAI,EAAE,CAAC,OAAe,EAAE,EAAE;YACxB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;YACzC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,OAAO,EAAE,CAAC,OAAuB,EAAE,EAAE;YACnC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,CAAC,CAAC;YAC5C,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,KAAK,EAAE,CAAC,OAAuB,EAAE,EAAE;YACjC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI;QACnB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAtBD,gDAsBC;AAED,0EAA0E;AAC1E,SAAgB,0BAA0B,CACxC,kBAA0B,EAC1B,QAAyC;IAEzC,kEAAkE;IAClE,MAAM,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IAExC,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IAEjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAClC,8DAA8D,CAC/D,CAAC;IACF,IAAI,kBAAkB,GAAG,GAAG,EAAE;QAC5B,QAAQ,CAAC,QAAQ,CAAC;YAChB,MAAM,EAAE,kBAAkB;YAC1B,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,8DAA8D;SACpE,CAAC,CAAC;KACJ;SAAM;QACL,QAAQ,CAAC,MAAM,CA
AC,IAAI,gBAAS,CAAC,oBAAoB,EAAE,kBAAkB,CAAC,CAAC,CAAC;KAC1E;IAED,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;AAC5D,CAAC;AAxBD,gEAwBC"}
12
lib/toolcache.js
generated
@@ -18,6 +18,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.downloadTool = exports.findAllVersions = exports.find = exports.cacheDir = exports.extractTar = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
@@ -27,6 +30,7 @@ const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const io = __importStar(require("@actions/io"));
|
||||
const actionsToolcache = __importStar(require("@actions/tool-cache"));
|
||||
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
||||
const del_1 = __importDefault(require("del"));
|
||||
const semver = __importStar(require("semver"));
|
||||
const uuid_1 = require("uuid");
|
||||
const util_1 = require("./util");
|
||||
@@ -123,7 +127,7 @@ async function cacheDir(sourceDir, tool, version, toolCacheDir, logger) {
|
||||
throw new Error("sourceDir is not a directory");
|
||||
}
|
||||
// Create the tool dir
|
||||
const destPath = createToolPath(tool, version, arch, toolCacheDir, logger);
|
||||
const destPath = await createToolPath(tool, version, arch, toolCacheDir, logger);
|
||||
// copy each child item. do not move. move can fail on Windows
|
||||
// due to anti-virus software having an open handle on a file.
|
||||
for (const itemName of fs.readdirSync(sourceDir)) {
|
||||
@@ -232,12 +236,12 @@ function createExtractFolder(tempDir) {
|
||||
}
|
||||
return dest;
|
||||
}
|
||||
function createToolPath(tool, version, arch, toolCacheDir, logger) {
|
||||
async function createToolPath(tool, version, arch, toolCacheDir, logger) {
|
||||
const folderPath = path.join(toolCacheDir, tool, semver.clean(version) || version, arch || "");
|
||||
logger.debug(`destination ${folderPath}`);
|
||||
const markerPath = `${folderPath}.complete`;
|
||||
fs.rmSync(folderPath, { recursive: true, force: true });
|
||||
fs.rmSync(markerPath, { recursive: true, force: true });
|
||||
await (0, del_1.default)(folderPath, { force: true });
|
||||
await (0, del_1.default)(markerPath, { force: true });
|
||||
fs.mkdirSync(folderPath, { recursive: true });
|
||||
return folderPath;
}
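For orientation (not part of the diff): the layout createToolPath produces, shown with hypothetical arguments.
// createToolPath("CodeQL", "2.7.3", "x64", "/opt/hostedtoolcache", logger) would recreate:
//   /opt/hostedtoolcache/CodeQL/2.7.3/x64            <- the returned folderPath
// and clear any stale completion marker next to it:
//   /opt/hostedtoolcache/CodeQL/2.7.3/x64.complete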
File diff suppressed because one or more lines are too long
8
lib/toolrunner-error-catcher.js
generated
@@ -44,10 +44,6 @@ async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
|
||||
if (((_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout) !== undefined) {
|
||||
options.listeners.stdout(data);
|
||||
}
|
||||
else {
|
||||
// if no stdout listener was originally defined then we match default behavior of Toolrunner
|
||||
process.stdout.write(data);
|
||||
}
|
||||
},
|
||||
stderr: (data) => {
|
||||
var _a;
|
||||
@@ -55,10 +51,6 @@ async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
|
||||
if (((_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stderr) !== undefined) {
|
||||
options.listeners.stderr(data);
|
||||
}
|
||||
else {
|
||||
// if no stderr listener was originally defined then we match default behavior of Toolrunner
|
||||
process.stderr.write(data);
|
||||
}
|
||||
},
|
||||
};
|
||||
// we capture the original return code or error so that if no match is found we can duplicate the behavior
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AAIpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CAC3C,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI,EAAE,wDAAwD;SACjF,CACF,CAAC,IAAI,EAAE,CAAC;KACV;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;KAC7D;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW;iBAChC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA;iBACjC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,WAAW,EAAE,CACpE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AAzED,wDAyEC"}
|
||||
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AAIpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CAC3C,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI,EAAE,wDAAwD;SACjF,CACF,CAAC,IAAI,EAAE,CAAC;KACV;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;KAC7D;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW;iBAChC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA;iBACjC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,WAAW,EAAE,CACpE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AAnED,wDAmEC"}
18  lib/tracer-config.js  generated
@@ -182,15 +182,15 @@ async function getCombinedTracerConfig(config, codeql) {
tracedLanguageConfigs[language] = await getTracerConfigForLanguage(codeql, config, language);
}
mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
}
// Add a couple more variables
mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
const codeQLDir = path.dirname(codeql.getPath());
if (process.platform === "darwin") {
mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
}
else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
// Add a couple more variables
mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
const codeQLDir = path.dirname(codeql.getPath());
if (process.platform === "darwin") {
mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
}
else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
}
}
// On macos it's necessary to prefix the build command with the runner executable
// on order to trace when System Integrity Protection is enabled.

File diff suppressed because one or more lines are too long
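The hunk above relocates the block that injects the build-tracer environment variables into the combined tracer config. As a standalone illustration of that per-platform selection, here is a minimal sketch; the variable names and library paths mirror the diff, while the helper name `tracerEnv` is hypothetical and not part of the action's API.

```typescript
import * as path from "path";

// Hypothetical helper: given the CodeQL CLI directory and the tracer spec file,
// build the environment variables the build tracer needs on each platform.
function tracerEnv(codeQLDir: string, spec: string): Record<string, string> {
  const env: Record<string, string> = {
    // Points the tracer at its configuration (spec) file.
    ODASA_TRACER_CONFIGURATION: spec,
  };
  if (process.platform === "darwin") {
    // macOS: inject the tracer library into build processes.
    env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
  } else if (process.platform !== "win32") {
    // Linux: ${LIB} is expanded by the dynamic loader at runtime.
    env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
  }
  // Windows gets neither variable; tracing is configured differently there.
  return env;
}
```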
3  lib/tracer-config.test.js  generated
@@ -44,6 +44,9 @@ function getTestConfig(tmpDir) {
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
};
}
// A very minimal setup

File diff suppressed because one or more lines are too long
98  lib/upload-lib.js  generated
@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromRunner = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
exports.validateUniqueCategory = exports.waitForProcessing = exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromRunner = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const zlib_1 = __importDefault(require("zlib"));
@@ -105,6 +105,7 @@ async function uploadPayload(payload, repositoryNwo, apiDetails, logger) {
});
logger.debug(`response status: ${response.status}`);
logger.info("Successfully uploaded results");
return response.data.id;
}
// Recursively walks a directory and returns all SARIF files it finds.
// Does not follow symlinks.
@@ -243,14 +244,7 @@ exports.buildPayload = buildPayload;
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, sourceRoot, environment, gitHubVersion, apiDetails, logger) {
logger.startGroup("Uploading results");
logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
if (util.isActions()) {
// This check only works on actions as env vars don't persist between calls to the runner
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
}
validateUniqueCategory(category);
// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
validateSarifFileSchema(file, logger);
@@ -270,12 +264,90 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
const numResultInSarif = countResultsInSarif(sarifPayload);
logger.debug(`Number of results in upload: ${numResultInSarif}`);
// Make the upload
await uploadPayload(payload, repositoryNwo, apiDetails, logger);
const sarifID = await uploadPayload(payload, repositoryNwo, apiDetails, logger);
logger.endGroup();
return {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
statusReport: {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
},
sarifID,
};
}
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
// Waits until either the analysis is successfully processed, a processing error is reported, or STATUS_CHECK_TIMEOUT_MILLISECONDS elapses.
async function waitForProcessing(repositoryNwo, sarifID, apiDetails, logger) {
logger.startGroup("Waiting for processing to finish");
const client = api.getApiClient(apiDetails);
const statusCheckingStarted = Date.now();
// eslint-disable-next-line no-constant-condition
while (true) {
if (Date.now() >
statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
// If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
// It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
break;
}
try {
const response = await client.request("GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id", {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
sarif_id: sarifID,
});
const status = response.data.processing_status;
logger.info(`Analysis upload status is ${status}.`);
if (status === "complete") {
break;
}
else if (status === "failed") {
throw new Error(`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`);
}
}
catch (e) {
if (util.isHTTPError(e)) {
switch (e.status) {
case 404:
logger.debug("Analysis is not found yet...");
break; // Note this breaks from the case statement, not the outer loop.
default:
throw e;
}
}
else {
throw e;
}
}
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
}
logger.endGroup();
}
exports.waitForProcessing = waitForProcessing;
function validateUniqueCategory(category) {
if (util.isActions()) {
// This check only works on actions as env vars don't persist between calls to the runner
const sentinelEnvVar = `CODEQL_UPLOAD_SARIF${category ? `_${sanitize(category)}` : ""}`;
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per category. " +
"Please specify a unique `category` to call this action multiple times. " +
`Category: ${category ? category : "(none)"}`);
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
}
}
exports.validateUniqueCategory = validateUniqueCategory;
/**
* Santizes a string to be used as an environment variable name.
* This will replace all non-alphanumeric characters with underscores.
* There could still be some false category clashes if two uploads
* occur that differ only in their non-alphanumeric characters. This is
* unlikely.
*
* @param str the initial value to sanitize
*/
function sanitize(str) {
return str.replace(/[^a-zA-Z0-9_]/g, "_");
}
//# sourceMappingURL=upload-lib.js.map

File diff suppressed because one or more lines are too long
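The new `waitForProcessing` above is a poll-with-timeout loop: it checks the SARIF processing status every 5 seconds, stops on `complete` or `failed`, and gives up without failing the job after 2 minutes. The following is a minimal, generic sketch of that pattern, assuming a hypothetical `getStatus` callback in place of the real code-scanning API call.

```typescript
const POLL_INTERVAL_MS = 5 * 1000;
const POLL_TIMEOUT_MS = 2 * 60 * 1000;

const delay = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

// Polls `getStatus` until it reports "complete" or "failed", or the timeout elapses.
// Timing out is treated as non-fatal, matching the behaviour in the diff above.
async function pollUntilProcessed(
  getStatus: () => Promise<"pending" | "complete" | "failed">
): Promise<void> {
  const started = Date.now();
  while (Date.now() - started < POLL_TIMEOUT_MS) {
    const status = await getStatus();
    if (status === "complete") {
      return;
    }
    if (status === "failed") {
      throw new Error("SARIF processing failed");
    }
    await delay(POLL_INTERVAL_MS);
  }
  console.warn("Timed out waiting for processing. Continuing.");
}
```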
16  lib/upload-lib.test.js  generated
@@ -113,4 +113,20 @@ ava_1.default.beforeEach(() => {
modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, undefined, analysisKey, '{"os": "linux", "language": "javascript"}');
t.deepEqual(modifiedSarif, expectedSarif);
});
(0, ava_1.default)("validateUniqueCategory", (t) => {
t.notThrows(() => uploadLib.validateUniqueCategory(undefined));
t.throws(() => uploadLib.validateUniqueCategory(undefined));
t.notThrows(() => uploadLib.validateUniqueCategory("abc"));
t.throws(() => uploadLib.validateUniqueCategory("abc"));
t.notThrows(() => uploadLib.validateUniqueCategory("def"));
t.throws(() => uploadLib.validateUniqueCategory("def"));
// Our category sanitization is not perfect. Here are some examples
// of where we see false clashes
t.notThrows(() => uploadLib.validateUniqueCategory("abc/def"));
t.throws(() => uploadLib.validateUniqueCategory("abc@def"));
t.throws(() => uploadLib.validateUniqueCategory("abc_def"));
t.throws(() => uploadLib.validateUniqueCategory("abc def"));
// this one is fine
t.notThrows(() => uploadLib.validateUniqueCategory("abc_ def"));
});
//# sourceMappingURL=upload-lib.test.js.map

File diff suppressed because one or more lines are too long
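For reference, the false clashes exercised in the test above fall out of the `sanitize` helper introduced in `lib/upload-lib.js`: every non-alphanumeric character maps to an underscore, so differently spelled categories can yield the same sentinel variable name. A small self-contained sketch of that collapse (the example strings are taken from the test):

```typescript
// Same sanitization as the sentinel environment-variable name in the diff:
// every character outside [a-zA-Z0-9_] becomes an underscore.
function sanitize(str: string): string {
  return str.replace(/[^a-zA-Z0-9_]/g, "_");
}

// These all collapse to "abc_def", which is why the test expects the later
// uploads to be rejected as duplicates of the first:
console.log(sanitize("abc/def")); // "abc_def"
console.log(sanitize("abc@def")); // "abc_def"
console.log(sanitize("abc def")); // "abc_def"
console.log(sanitize("abc_def")); // "abc_def"
// This one produces a distinct name ("abc__def"), so it is accepted:
console.log(sanitize("abc_ def"));
```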
8  lib/upload-sarif-action.js  generated
@@ -22,6 +22,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
@@ -46,8 +47,11 @@ async function run() {
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
};
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
const uploadStats = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)());
await sendSuccessStatusReport(startedAt, uploadStats);
const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)());
if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
}
await sendSuccessStatusReport(startedAt, uploadResult.statusReport);
}
catch (error) {
const message = error instanceof Error ? error.message : String(error);

@@ -1 +1 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,uBAAgB,EAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACpD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KACvD;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,uBAAgB,EAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YAClE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
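The diff above wires the new `wait-for-processing` input into the upload action: the upload now returns an object containing both the SARIF ID and the status report, and the action only blocks on Code Scanning's processing when the input is set. A simplified sketch of that control flow, with the real helpers stubbed out as parameters (their names follow the diff, their signatures here are illustrative only):

```typescript
// Sketch of the new control flow in upload-sarif-action.js; not the action's real types.
type UploadResult = { sarifID: string; statusReport: Record<string, unknown> };

async function runUploadStep(
  getInput: (name: string) => string,
  uploadFromActions: () => Promise<UploadResult>,
  waitForProcessing: (sarifID: string) => Promise<void>,
  sendSuccessStatusReport: (report: Record<string, unknown>) => Promise<void>
): Promise<void> {
  const uploadResult = await uploadFromActions();
  // Only block on Code Scanning's processing when the user opted in.
  if (getInput("wait-for-processing") === "true") {
    await waitForProcessing(uploadResult.sarifID);
  }
  // The status report is now a field of the result rather than the result itself.
  await sendSuccessStatusReport(uploadResult.statusReport);
}
```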
96  lib/util.js  generated
@@ -18,20 +18,37 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.GITHUB_DOTCOM_URL = void 0;
exports.checkNotWindows11 = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const del_1 = __importDefault(require("del"));
const semver = __importStar(require("semver"));
const api_client_1 = require("./api-client");
const apiCompatibility = __importStar(require("./api-compatibility.json"));
const codeql_1 = require("./codeql");
/**
* Specifies bundle versions that are known to be broken
* and will not be used if found in the toolcache.
*/
const BROKEN_VERSIONS = ["0.0.0-20211207"];
/**
* The URL for github.com.
*/
exports.GITHUB_DOTCOM_URL = "https://github.com";
/**
* Default name of the debugging artifact.
*/
exports.DEFAULT_DEBUG_ARTIFACT_NAME = "debug-artifacts";
/**
* Default name of the database in the debugging artifact.
*/
exports.DEFAULT_DEBUG_DATABASE_NAME = "db";
/**
* Get the extra options for the codeql commands.
*/
@@ -77,7 +94,7 @@ async function withTmpDir(body) {
const symlinkSubdir = path.join(tmpDir, "symlink");
fs.symlinkSync(realSubdir, symlinkSubdir, "dir");
const result = await body(symlinkSubdir);
fs.rmSync(tmpDir, { recursive: true, force: true });
await (0, del_1.default)(tmpDir, { force: true });
return result;
}
exports.withTmpDir = withTmpDir;
@@ -93,13 +110,13 @@ function getSystemReservedMemoryMegaBytes() {
return 1024 * (process.platform === "win32" ? 1.5 : 1);
}
/**
* Get the codeql `--ram` flag as configured by the `ram` input. If no value was
* specified, the total available memory will be used minus a threshold
* reserved for the OS.
* Get the value of the codeql `--ram` flag as configured by the `ram` input.
* If no value was specified, the total available memory will be used minus a
* threshold reserved for the OS.
*
* @returns string
* @returns {number} the amount of RAM to use, in megabytes
*/
function getMemoryFlag(userInput) {
function getMemoryFlagValue(userInput) {
let memoryToUseMegaBytes;
if (userInput) {
memoryToUseMegaBytes = Number(userInput);
@@ -113,7 +130,18 @@ function getMemoryFlag(userInput) {
const reservedMemoryMegaBytes = getSystemReservedMemoryMegaBytes();
memoryToUseMegaBytes = totalMemoryMegaBytes - reservedMemoryMegaBytes;
}
return `--ram=${Math.floor(memoryToUseMegaBytes)}`;
return Math.floor(memoryToUseMegaBytes);
}
exports.getMemoryFlagValue = getMemoryFlagValue;
/**
* Get the codeql `--ram` flag as configured by the `ram` input. If no value was
* specified, the total available memory will be used minus a threshold
* reserved for the OS.
*
* @returns string
*/
function getMemoryFlag(userInput) {
return `--ram=${getMemoryFlagValue(userInput)}`;
}
exports.getMemoryFlag = getMemoryFlag;
/**
@@ -130,14 +158,14 @@ function getAddSnippetsFlag(userInput) {
}
exports.getAddSnippetsFlag = getAddSnippetsFlag;
/**
* Get the codeql `--threads` value specified for the `threads` input.
* If no value was specified, all available threads will be used.
* Get the value of the codeql `--threads` flag specified for the `threads`
* input. If no value was specified, all available threads will be used.
*
* The value will be capped to the number of available CPUs.
*
* @returns string
* @returns {number}
*/
function getThreadsFlag(userInput, logger) {
function getThreadsFlagValue(userInput, logger) {
let numThreads;
const maxThreads = os.cpus().length;
if (userInput) {
@@ -159,7 +187,19 @@ function getThreadsFlag(userInput, logger) {
// Default to using all threads
numThreads = maxThreads;
}
return `--threads=${numThreads}`;
return numThreads;
}
exports.getThreadsFlagValue = getThreadsFlagValue;
/**
* Get the codeql `--threads` flag specified for the `threads` input.
* If no value was specified, all available threads will be used.
*
* The value will be capped to the number of available CPUs.
*
* @returns string
*/
function getThreadsFlag(userInput, logger) {
return `--threads=${getThreadsFlagValue(userInput, logger)}`;
}
exports.getThreadsFlag = getThreadsFlag;
/**
@@ -455,4 +495,34 @@ async function codeQlVersionAbove(codeql, requiredVersion) {
return semver.gte(await codeql.getVersion(), requiredVersion);
}
exports.codeQlVersionAbove = codeQlVersionAbove;
// Create a bundle for the given DB, if it doesn't already exist
async function bundleDb(config, language, codeql, dbName) {
const databasePath = getCodeQLDatabasePath(config, language);
const databaseBundlePath = path.resolve(config.dbLocation, `${dbName}.zip`);
// For a tiny bit of added safety, delete the file if it exists.
// The file is probably from an earlier call to this function, either
// as part of this action step or a previous one, but it could also be
// from somewhere else or someone trying to make the action upload a
// non-database file.
if (fs.existsSync(databaseBundlePath)) {
await (0, del_1.default)(databaseBundlePath, { force: true });
}
await codeql.databaseBundle(databasePath, databaseBundlePath, dbName);
return databaseBundlePath;
}
exports.bundleDb = bundleDb;
async function delay(milliseconds) {
return new Promise((resolve) => setTimeout(resolve, milliseconds));
}
exports.delay = delay;
function isGoodVersion(versionSpec) {
return !BROKEN_VERSIONS.includes(versionSpec);
}
exports.isGoodVersion = isGoodVersion;
function checkNotWindows11() {
if (os.platform() === "win32" && semver.gte(os.release(), "10.0.20348")) {
throw new Error("Tracing builds with CodeQL is currently not supported on Windows 11 and Windows Server 2022. Please modify your Actions workflow to use an earlier version of Windows for this job, for example by setting `runs-on: windows-2019`.");
}
}
exports.checkNotWindows11 = checkNotWindows11;
//# sourceMappingURL=util.js.map

File diff suppressed because one or more lines are too long
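The util.js changes above follow one refactoring pattern: each flag helper is split into a `...Value` function that returns a plain number and a thin wrapper that formats the CLI flag string. A minimal sketch of that pattern, under the assumption of simplified input handling (the real `getThreadsFlagValue` also logs and treats negative inputs specially):

```typescript
import * as os from "os";

// Compute the numeric value once, then format it as a CLI flag in a thin wrapper.
function getThreadsFlagValue(userInput: string | undefined): number {
  const maxThreads = os.cpus().length;
  const numThreads = userInput ? Number(userInput) : maxThreads;
  if (Number.isNaN(numThreads)) {
    throw new Error(`Invalid threads setting "${userInput}", specify an integer.`);
  }
  // Cap to the number of available CPUs, as the doc comment in the diff states.
  return Math.min(numThreads, maxThreads);
}

function getThreadsFlag(userInput: string | undefined): string {
  return `--threads=${getThreadsFlagValue(userInput)}`;
}

console.log(getThreadsFlag(undefined)); // e.g. "--threads=8" on an 8-core runner
```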
2010  node_modules/.package-lock.json  generated vendored
File diff suppressed because it is too large
125
node_modules/@octokit/endpoint/node_modules/is-plain-object/README.md
generated
vendored
125
node_modules/@octokit/endpoint/node_modules/is-plain-object/README.md
generated
vendored
@@ -1,125 +0,0 @@
|
||||
# is-plain-object [](https://www.npmjs.com/package/is-plain-object) [](https://npmjs.org/package/is-plain-object) [](https://npmjs.org/package/is-plain-object) [](https://travis-ci.org/jonschlinkert/is-plain-object)
|
||||
|
||||
> Returns true if an object was created by the `Object` constructor, or Object.create(null).
|
||||
|
||||
Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support.
|
||||
|
||||
## Install
|
||||
|
||||
Install with [npm](https://www.npmjs.com/):
|
||||
|
||||
```sh
|
||||
$ npm install --save is-plain-object
|
||||
```
|
||||
|
||||
Use [isobject](https://github.com/jonschlinkert/isobject) if you only want to check if the value is an object and not an array or null.
|
||||
|
||||
## Usage
|
||||
|
||||
with es modules
|
||||
```js
|
||||
import { isPlainObject } from 'is-plain-object';
|
||||
```
|
||||
|
||||
or with commonjs
|
||||
```js
|
||||
const { isPlainObject } = require('is-plain-object');
|
||||
```
|
||||
|
||||
**true** when created by the `Object` constructor, or Object.create(null).
|
||||
|
||||
```js
|
||||
isPlainObject(Object.create({}));
|
||||
//=> true
|
||||
isPlainObject(Object.create(Object.prototype));
|
||||
//=> true
|
||||
isPlainObject({foo: 'bar'});
|
||||
//=> true
|
||||
isPlainObject({});
|
||||
//=> true
|
||||
isPlainObject(null);
|
||||
//=> true
|
||||
```
|
||||
|
||||
**false** when not created by the `Object` constructor.
|
||||
|
||||
```js
|
||||
isPlainObject(1);
|
||||
//=> false
|
||||
isPlainObject(['foo', 'bar']);
|
||||
//=> false
|
||||
isPlainObject([]);
|
||||
//=> false
|
||||
isPlainObject(new Foo);
|
||||
//=> false
|
||||
isPlainObject(Object.create(null));
|
||||
//=> false
|
||||
```
|
||||
|
||||
## About
|
||||
|
||||
<details>
|
||||
<summary><strong>Contributing</strong></summary>
|
||||
|
||||
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Running Tests</strong></summary>
|
||||
|
||||
Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
|
||||
|
||||
```sh
|
||||
$ npm install && npm test
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Building docs</strong></summary>
|
||||
|
||||
_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
|
||||
|
||||
To generate the readme, run the following command:
|
||||
|
||||
```sh
|
||||
$ npm install -g verbose/verb#dev verb-generate-readme && verb
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Related projects
|
||||
|
||||
You might also be interested in these projects:
|
||||
|
||||
* [is-number](https://www.npmjs.com/package/is-number): Returns true if a number or string value is a finite number. Useful for regex… [more](https://github.com/jonschlinkert/is-number) | [homepage](https://github.com/jonschlinkert/is-number "Returns true if a number or string value is a finite number. Useful for regex matches, parsing, user input, etc.")
|
||||
* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.")
|
||||
* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.")
|
||||
|
||||
### Contributors
|
||||
|
||||
| **Commits** | **Contributor** |
|
||||
| --- | --- |
|
||||
| 19 | [jonschlinkert](https://github.com/jonschlinkert) |
|
||||
| 6 | [TrySound](https://github.com/TrySound) |
|
||||
| 6 | [stevenvachon](https://github.com/stevenvachon) |
|
||||
| 3 | [onokumus](https://github.com/onokumus) |
|
||||
| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) |
|
||||
|
||||
### Author
|
||||
|
||||
**Jon Schlinkert**
|
||||
|
||||
* [GitHub Profile](https://github.com/jonschlinkert)
|
||||
* [Twitter Profile](https://twitter.com/jonschlinkert)
|
||||
* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert)
|
||||
|
||||
### License
|
||||
|
||||
Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert).
|
||||
Released under the [MIT License](LICENSE).
|
||||
|
||||
***
|
||||
|
||||
_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 28, 2019._
|
||||
85
node_modules/@octokit/endpoint/node_modules/is-plain-object/package.json
generated
vendored
85
node_modules/@octokit/endpoint/node_modules/is-plain-object/package.json
generated
vendored
@@ -1,85 +0,0 @@
|
||||
{
|
||||
"name": "is-plain-object",
|
||||
"description": "Returns true if an object was created by the `Object` constructor, or Object.create(null).",
|
||||
"version": "5.0.0",
|
||||
"homepage": "https://github.com/jonschlinkert/is-plain-object",
|
||||
"author": "Jon Schlinkert (https://github.com/jonschlinkert)",
|
||||
"contributors": [
|
||||
"Jon Schlinkert (http://twitter.com/jonschlinkert)",
|
||||
"Osman Nuri Okumuş (http://onokumus.com)",
|
||||
"Steven Vachon (https://svachon.com)",
|
||||
"(https://github.com/wtgtybhertgeghgtwtg)",
|
||||
"Bogdan Chadkin (https://github.com/TrySound)"
|
||||
],
|
||||
"repository": "jonschlinkert/is-plain-object",
|
||||
"bugs": {
|
||||
"url": "https://github.com/jonschlinkert/is-plain-object/issues"
|
||||
},
|
||||
"license": "MIT",
|
||||
"main": "dist/is-plain-object.js",
|
||||
"module": "dist/is-plain-object.mjs",
|
||||
"types": "is-plain-object.d.ts",
|
||||
"files": [
|
||||
"is-plain-object.d.ts",
|
||||
"dist"
|
||||
],
|
||||
"exports": {
|
||||
".": {
|
||||
"import": "./dist/is-plain-object.mjs",
|
||||
"require": "./dist/is-plain-object.js"
|
||||
},
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "rollup -c",
|
||||
"test_browser": "mocha-headless-chrome --args=disable-web-security -f test/browser.html",
|
||||
"test_node": "mocha -r esm",
|
||||
"test": "npm run test_node && npm run build && npm run test_browser",
|
||||
"prepare": "rollup -c"
|
||||
},
|
||||
"devDependencies": {
|
||||
"chai": "^4.2.0",
|
||||
"esm": "^3.2.22",
|
||||
"gulp-format-md": "^1.0.0",
|
||||
"mocha": "^6.1.4",
|
||||
"mocha-headless-chrome": "^3.1.0",
|
||||
"rollup": "^2.22.1"
|
||||
},
|
||||
"keywords": [
|
||||
"check",
|
||||
"is",
|
||||
"is-object",
|
||||
"isobject",
|
||||
"javascript",
|
||||
"kind",
|
||||
"kind-of",
|
||||
"object",
|
||||
"plain",
|
||||
"type",
|
||||
"typeof",
|
||||
"value"
|
||||
],
|
||||
"verb": {
|
||||
"toc": false,
|
||||
"layout": "default",
|
||||
"tasks": [
|
||||
"readme"
|
||||
],
|
||||
"plugins": [
|
||||
"gulp-format-md"
|
||||
],
|
||||
"related": {
|
||||
"list": [
|
||||
"is-number",
|
||||
"isobject",
|
||||
"kind-of"
|
||||
]
|
||||
},
|
||||
"lint": {
|
||||
"reflinks": true
|
||||
}
|
||||
}
|
||||
}
|
||||
125
node_modules/@octokit/request/node_modules/is-plain-object/README.md
generated
vendored
125
node_modules/@octokit/request/node_modules/is-plain-object/README.md
generated
vendored
@@ -1,125 +0,0 @@
|
||||
# is-plain-object [](https://www.npmjs.com/package/is-plain-object) [](https://npmjs.org/package/is-plain-object) [](https://npmjs.org/package/is-plain-object) [](https://travis-ci.org/jonschlinkert/is-plain-object)
|
||||
|
||||
> Returns true if an object was created by the `Object` constructor, or Object.create(null).
|
||||
|
||||
Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support.
|
||||
|
||||
## Install
|
||||
|
||||
Install with [npm](https://www.npmjs.com/):
|
||||
|
||||
```sh
|
||||
$ npm install --save is-plain-object
|
||||
```
|
||||
|
||||
Use [isobject](https://github.com/jonschlinkert/isobject) if you only want to check if the value is an object and not an array or null.
|
||||
|
||||
## Usage
|
||||
|
||||
with es modules
|
||||
```js
|
||||
import { isPlainObject } from 'is-plain-object';
|
||||
```
|
||||
|
||||
or with commonjs
|
||||
```js
|
||||
const { isPlainObject } = require('is-plain-object');
|
||||
```
|
||||
|
||||
**true** when created by the `Object` constructor, or Object.create(null).
|
||||
|
||||
```js
|
||||
isPlainObject(Object.create({}));
|
||||
//=> true
|
||||
isPlainObject(Object.create(Object.prototype));
|
||||
//=> true
|
||||
isPlainObject({foo: 'bar'});
|
||||
//=> true
|
||||
isPlainObject({});
|
||||
//=> true
|
||||
isPlainObject(null);
|
||||
//=> true
|
||||
```
|
||||
|
||||
**false** when not created by the `Object` constructor.
|
||||
|
||||
```js
|
||||
isPlainObject(1);
|
||||
//=> false
|
||||
isPlainObject(['foo', 'bar']);
|
||||
//=> false
|
||||
isPlainObject([]);
|
||||
//=> false
|
||||
isPlainObject(new Foo);
|
||||
//=> false
|
||||
isPlainObject(Object.create(null));
|
||||
//=> false
|
||||
```
|
||||
|
||||
## About
|
||||
|
||||
<details>
|
||||
<summary><strong>Contributing</strong></summary>
|
||||
|
||||
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Running Tests</strong></summary>
|
||||
|
||||
Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
|
||||
|
||||
```sh
|
||||
$ npm install && npm test
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Building docs</strong></summary>
|
||||
|
||||
_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
|
||||
|
||||
To generate the readme, run the following command:
|
||||
|
||||
```sh
|
||||
$ npm install -g verbose/verb#dev verb-generate-readme && verb
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Related projects
|
||||
|
||||
You might also be interested in these projects:
|
||||
|
||||
* [is-number](https://www.npmjs.com/package/is-number): Returns true if a number or string value is a finite number. Useful for regex… [more](https://github.com/jonschlinkert/is-number) | [homepage](https://github.com/jonschlinkert/is-number "Returns true if a number or string value is a finite number. Useful for regex matches, parsing, user input, etc.")
|
||||
* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.")
|
||||
* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.")
|
||||
|
||||
### Contributors
|
||||
|
||||
| **Commits** | **Contributor** |
|
||||
| --- | --- |
|
||||
| 19 | [jonschlinkert](https://github.com/jonschlinkert) |
|
||||
| 6 | [TrySound](https://github.com/TrySound) |
|
||||
| 6 | [stevenvachon](https://github.com/stevenvachon) |
|
||||
| 3 | [onokumus](https://github.com/onokumus) |
|
||||
| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) |
|
||||
|
||||
### Author
|
||||
|
||||
**Jon Schlinkert**
|
||||
|
||||
* [GitHub Profile](https://github.com/jonschlinkert)
|
||||
* [Twitter Profile](https://twitter.com/jonschlinkert)
|
||||
* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert)
|
||||
|
||||
### License
|
||||
|
||||
Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert).
|
||||
Released under the [MIT License](LICENSE).
|
||||
|
||||
***
|
||||
|
||||
_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 28, 2019._
|
||||
38
node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.js
generated
vendored
38
node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.js
generated
vendored
@@ -1,38 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
/*!
|
||||
* is-plain-object <https://github.com/jonschlinkert/is-plain-object>
|
||||
*
|
||||
* Copyright (c) 2014-2017, Jon Schlinkert.
|
||||
* Released under the MIT License.
|
||||
*/
|
||||
|
||||
function isObject(o) {
|
||||
return Object.prototype.toString.call(o) === '[object Object]';
|
||||
}
|
||||
|
||||
function isPlainObject(o) {
|
||||
var ctor,prot;
|
||||
|
||||
if (isObject(o) === false) return false;
|
||||
|
||||
// If has modified constructor
|
||||
ctor = o.constructor;
|
||||
if (ctor === undefined) return true;
|
||||
|
||||
// If has modified prototype
|
||||
prot = ctor.prototype;
|
||||
if (isObject(prot) === false) return false;
|
||||
|
||||
// If constructor does not have an Object-specific method
|
||||
if (prot.hasOwnProperty('isPrototypeOf') === false) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Most likely a plain Object
|
||||
return true;
|
||||
}
|
||||
|
||||
exports.isPlainObject = isPlainObject;
|
||||
34
node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.mjs
generated
vendored
34
node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.mjs
generated
vendored
@@ -1,34 +0,0 @@
|
||||
/*!
|
||||
* is-plain-object <https://github.com/jonschlinkert/is-plain-object>
|
||||
*
|
||||
* Copyright (c) 2014-2017, Jon Schlinkert.
|
||||
* Released under the MIT License.
|
||||
*/
|
||||
|
||||
function isObject(o) {
|
||||
return Object.prototype.toString.call(o) === '[object Object]';
|
||||
}
|
||||
|
||||
function isPlainObject(o) {
|
||||
var ctor,prot;
|
||||
|
||||
if (isObject(o) === false) return false;
|
||||
|
||||
// If has modified constructor
|
||||
ctor = o.constructor;
|
||||
if (ctor === undefined) return true;
|
||||
|
||||
// If has modified prototype
|
||||
prot = ctor.prototype;
|
||||
if (isObject(prot) === false) return false;
|
||||
|
||||
// If constructor does not have an Object-specific method
|
||||
if (prot.hasOwnProperty('isPrototypeOf') === false) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Most likely a plain Object
|
||||
return true;
|
||||
}
|
||||
|
||||
export { isPlainObject };
|
||||
1
node_modules/@octokit/request/node_modules/is-plain-object/is-plain-object.d.ts
generated
vendored
1
node_modules/@octokit/request/node_modules/is-plain-object/is-plain-object.d.ts
generated
vendored
@@ -1 +0,0 @@
|
||||
export function isPlainObject(o: any): boolean;
|
||||
85
node_modules/@octokit/request/node_modules/is-plain-object/package.json
generated
vendored
85
node_modules/@octokit/request/node_modules/is-plain-object/package.json
generated
vendored
@@ -1,85 +0,0 @@
|
||||
{
|
||||
"name": "is-plain-object",
|
||||
"description": "Returns true if an object was created by the `Object` constructor, or Object.create(null).",
|
||||
"version": "5.0.0",
|
||||
"homepage": "https://github.com/jonschlinkert/is-plain-object",
|
||||
"author": "Jon Schlinkert (https://github.com/jonschlinkert)",
|
||||
"contributors": [
|
||||
"Jon Schlinkert (http://twitter.com/jonschlinkert)",
|
||||
"Osman Nuri Okumuş (http://onokumus.com)",
|
||||
"Steven Vachon (https://svachon.com)",
|
||||
"(https://github.com/wtgtybhertgeghgtwtg)",
|
||||
"Bogdan Chadkin (https://github.com/TrySound)"
|
||||
],
|
||||
"repository": "jonschlinkert/is-plain-object",
|
||||
"bugs": {
|
||||
"url": "https://github.com/jonschlinkert/is-plain-object/issues"
|
||||
},
|
||||
"license": "MIT",
|
||||
"main": "dist/is-plain-object.js",
|
||||
"module": "dist/is-plain-object.mjs",
|
||||
"types": "is-plain-object.d.ts",
|
||||
"files": [
|
||||
"is-plain-object.d.ts",
|
||||
"dist"
|
||||
],
|
||||
"exports": {
|
||||
".": {
|
||||
"import": "./dist/is-plain-object.mjs",
|
||||
"require": "./dist/is-plain-object.js"
|
||||
},
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "rollup -c",
|
||||
"test_browser": "mocha-headless-chrome --args=disable-web-security -f test/browser.html",
|
||||
"test_node": "mocha -r esm",
|
||||
"test": "npm run test_node && npm run build && npm run test_browser",
|
||||
"prepare": "rollup -c"
|
||||
},
|
||||
"devDependencies": {
|
||||
"chai": "^4.2.0",
|
||||
"esm": "^3.2.22",
|
||||
"gulp-format-md": "^1.0.0",
|
||||
"mocha": "^6.1.4",
|
||||
"mocha-headless-chrome": "^3.1.0",
|
||||
"rollup": "^2.22.1"
|
||||
},
|
||||
"keywords": [
|
||||
"check",
|
||||
"is",
|
||||
"is-object",
|
||||
"isobject",
|
||||
"javascript",
|
||||
"kind",
|
||||
"kind-of",
|
||||
"object",
|
||||
"plain",
|
||||
"type",
|
||||
"typeof",
|
||||
"value"
|
||||
],
|
||||
"verb": {
|
||||
"toc": false,
|
||||
"layout": "default",
|
||||
"tasks": [
|
||||
"readme"
|
||||
],
|
||||
"plugins": [
|
||||
"gulp-format-md"
|
||||
],
|
||||
"related": {
|
||||
"list": [
|
||||
"is-number",
|
||||
"isobject",
|
||||
"kind-of"
|
||||
]
|
||||
},
|
||||
"lint": {
|
||||
"reflinks": true
|
||||
}
|
||||
}
|
||||
}
|
||||
0
node_modules/@types/events/LICENSE → node_modules/@types/del/LICENSE
generated
vendored
0
node_modules/@types/events/LICENSE → node_modules/@types/del/LICENSE
generated
vendored
3
node_modules/@types/del/README.md
generated
vendored
Normal file
3
node_modules/@types/del/README.md
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
This is a stub types definition for del (https://github.com/sindresorhus/del).
|
||||
|
||||
del provides its own type definitions, so you don't need @types/del installed!
|
||||
14
node_modules/@types/del/package.json
generated
vendored
Normal file
14
node_modules/@types/del/package.json
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"name": "@types/del",
|
||||
"version": "4.0.0",
|
||||
"typings": null,
|
||||
"description": "Stub TypeScript definitions entry for del, which provides its own types definitions",
|
||||
"main": "",
|
||||
"scripts": {},
|
||||
"author": "",
|
||||
"repository": "https://github.com/sindresorhus/del",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"del": "*"
|
||||
}
|
||||
}
|
||||
16
node_modules/@types/events/README.md
generated
vendored
16
node_modules/@types/events/README.md
generated
vendored
@@ -1,16 +0,0 @@
|
||||
# Installation
|
||||
> `npm install --save @types/events`
|
||||
|
||||
# Summary
|
||||
This package contains type definitions for events (https://github.com/Gozala/events).
|
||||
|
||||
# Details
|
||||
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/events
|
||||
|
||||
Additional Details
|
||||
* Last updated: Thu, 24 Jan 2019 03:19:08 GMT
|
||||
* Dependencies: none
|
||||
* Global values: none
|
||||
|
||||
# Credits
|
||||
These definitions were written by Yasunori Ohoka <https://github.com/yasupeke>, Shenwei Wang <https://github.com/weareoutman>.
|
||||
28
node_modules/@types/events/index.d.ts
generated
vendored
28
node_modules/@types/events/index.d.ts
generated
vendored
@@ -1,28 +0,0 @@
|
||||
// Type definitions for events 3.0
|
||||
// Project: https://github.com/Gozala/events
|
||||
// Definitions by: Yasunori Ohoka <https://github.com/yasupeke>
|
||||
// Shenwei Wang <https://github.com/weareoutman>
|
||||
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||
|
||||
export type Listener = (...args: any[]) => void;
|
||||
|
||||
export class EventEmitter {
|
||||
static listenerCount(emitter: EventEmitter, type: string | number): number;
|
||||
static defaultMaxListeners: number;
|
||||
|
||||
eventNames(): Array<string | number>;
|
||||
setMaxListeners(n: number): this;
|
||||
getMaxListeners(): number;
|
||||
emit(type: string | number, ...args: any[]): boolean;
|
||||
addListener(type: string | number, listener: Listener): this;
|
||||
on(type: string | number, listener: Listener): this;
|
||||
once(type: string | number, listener: Listener): this;
|
||||
prependListener(type: string | number, listener: Listener): this;
|
||||
prependOnceListener(type: string | number, listener: Listener): this;
|
||||
removeListener(type: string | number, listener: Listener): this;
|
||||
off(type: string | number, listener: Listener): this;
|
||||
removeAllListeners(type?: string | number): this;
|
||||
listeners(type: string | number): Listener[];
|
||||
listenerCount(type: string | number): number;
|
||||
rawListeners(type: string | number): Listener[];
|
||||
}
|
||||
28
node_modules/@types/events/package.json
generated
vendored
28
node_modules/@types/events/package.json
generated
vendored
@@ -1,28 +0,0 @@
|
||||
{
|
||||
"name": "@types/events",
|
||||
"version": "3.0.0",
|
||||
"description": "TypeScript definitions for events",
|
||||
"license": "MIT",
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Yasunori Ohoka",
|
||||
"url": "https://github.com/yasupeke",
|
||||
"githubUsername": "yasupeke"
|
||||
},
|
||||
{
|
||||
"name": "Shenwei Wang",
|
||||
"url": "https://github.com/weareoutman",
|
||||
"githubUsername": "weareoutman"
|
||||
}
|
||||
],
|
||||
"main": "",
|
||||
"types": "index",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git"
|
||||
},
|
||||
"scripts": {},
|
||||
"dependencies": {},
|
||||
"typesPublisherContentHash": "ae078136220837864b64cc7c1c5267ca1ceb809166fb74569e637bc7de9f2e12",
|
||||
"typeScriptVersion": "2.0"
|
||||
}
|
||||
21
node_modules/@types/glob/LICENSE
generated
vendored
21
node_modules/@types/glob/LICENSE
generated
vendored
@@ -1,21 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE
|
||||
16
node_modules/@types/glob/README.md
generated
vendored
16
node_modules/@types/glob/README.md
generated
vendored
@@ -1,16 +0,0 @@
|
||||
# Installation
|
||||
> `npm install --save @types/glob`
|
||||
|
||||
# Summary
|
||||
This package contains type definitions for Glob (https://github.com/isaacs/node-glob).
|
||||
|
||||
# Details
|
||||
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/glob
|
||||
|
||||
Additional Details
|
||||
* Last updated: Thu, 27 Sep 2018 12:34:19 GMT
|
||||
* Dependencies: events, minimatch, node
|
||||
* Global values: none
|
||||
|
||||
# Credits
|
||||
These definitions were written by vvakame <https://github.com/vvakame>, voy <https://github.com/voy>, Klaus Meinhardt <https://github.com/ajafff>.
|
||||
87
node_modules/@types/glob/index.d.ts
generated
vendored
87
node_modules/@types/glob/index.d.ts
generated
vendored
@@ -1,87 +0,0 @@
|
||||
// Type definitions for Glob 7.1
|
||||
// Project: https://github.com/isaacs/node-glob
|
||||
// Definitions by: vvakame <https://github.com/vvakame>
|
||||
// voy <https://github.com/voy>
|
||||
// Klaus Meinhardt <https://github.com/ajafff>
|
||||
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||
|
||||
/// <reference types="node" />
|
||||
|
||||
import events = require("events");
|
||||
import minimatch = require("minimatch");
|
||||
|
||||
declare function G(pattern: string, cb: (err: Error | null, matches: string[]) => void): void;
|
||||
declare function G(pattern: string, options: G.IOptions, cb: (err: Error | null, matches: string[]) => void): void;
|
||||
|
||||
declare namespace G {
|
||||
function __promisify__(pattern: string, options?: IOptions): Promise<string[]>;
|
||||
|
||||
function sync(pattern: string, options?: IOptions): string[];
|
||||
|
||||
function hasMagic(pattern: string, options?: IOptions): boolean;
|
||||
|
||||
let Glob: IGlobStatic;
|
||||
let GlobSync: IGlobSyncStatic;
|
||||
|
||||
interface IOptions extends minimatch.IOptions {
|
||||
cwd?: string;
|
||||
root?: string;
|
||||
dot?: boolean;
|
||||
nomount?: boolean;
|
||||
mark?: boolean;
|
||||
nosort?: boolean;
|
||||
stat?: boolean;
|
||||
silent?: boolean;
|
||||
strict?: boolean;
|
||||
cache?: { [path: string]: boolean | 'DIR' | 'FILE' | ReadonlyArray<string> };
|
||||
statCache?: { [path: string]: false | { isDirectory(): boolean} | undefined };
|
||||
symlinks?: { [path: string]: boolean | undefined };
|
||||
realpathCache?: { [path: string]: string };
|
||||
sync?: boolean;
|
||||
nounique?: boolean;
|
||||
nonull?: boolean;
|
||||
debug?: boolean;
|
||||
nobrace?: boolean;
|
||||
noglobstar?: boolean;
|
||||
noext?: boolean;
|
||||
nocase?: boolean;
|
||||
matchBase?: any;
|
||||
nodir?: boolean;
|
||||
ignore?: string | ReadonlyArray<string>;
|
||||
follow?: boolean;
|
||||
realpath?: boolean;
|
||||
nonegate?: boolean;
|
||||
nocomment?: boolean;
|
||||
absolute?: boolean;
|
||||
}
|
||||
|
||||
interface IGlobStatic extends events.EventEmitter {
|
||||
new (pattern: string, cb?: (err: Error | null, matches: string[]) => void): IGlob;
|
||||
new (pattern: string, options: IOptions, cb?: (err: Error | null, matches: string[]) => void): IGlob;
|
||||
prototype: IGlob;
|
||||
}
|
||||
|
||||
interface IGlobSyncStatic {
|
||||
new (pattern: string, options?: IOptions): IGlobBase;
|
||||
prototype: IGlobBase;
|
||||
}
|
||||
|
||||
interface IGlobBase {
|
||||
minimatch: minimatch.IMinimatch;
|
||||
options: IOptions;
|
||||
aborted: boolean;
|
||||
cache: { [path: string]: boolean | 'DIR' | 'FILE' | ReadonlyArray<string> };
|
||||
statCache: { [path: string]: false | { isDirectory(): boolean; } | undefined };
|
||||
symlinks: { [path: string]: boolean | undefined };
|
||||
realpathCache: { [path: string]: string };
|
||||
found: string[];
|
||||
}
|
||||
|
||||
interface IGlob extends IGlobBase, events.EventEmitter {
|
||||
pause(): void;
|
||||
resume(): void;
|
||||
abort(): void;
|
||||
}
|
||||
}
|
||||
|
||||
export = G;
|
||||
36
node_modules/@types/glob/package.json
generated
vendored
36
node_modules/@types/glob/package.json
generated
vendored
@@ -1,36 +0,0 @@
|
||||
{
|
||||
"name": "@types/glob",
|
||||
"version": "7.1.1",
|
||||
"description": "TypeScript definitions for Glob",
|
||||
"license": "MIT",
|
||||
"contributors": [
|
||||
{
|
||||
"name": "vvakame",
|
||||
"url": "https://github.com/vvakame",
|
||||
"githubUsername": "vvakame"
|
||||
},
|
||||
{
|
||||
"name": "voy",
|
||||
"url": "https://github.com/voy",
|
||||
"githubUsername": "voy"
|
||||
},
|
||||
{
|
||||
"name": "Klaus Meinhardt",
|
||||
"url": "https://github.com/ajafff",
|
||||
"githubUsername": "ajafff"
|
||||
}
|
||||
],
|
||||
"main": "",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git"
|
||||
},
|
||||
"scripts": {},
|
||||
"dependencies": {
|
||||
"@types/events": "*",
|
||||
"@types/minimatch": "*",
|
||||
"@types/node": "*"
|
||||
},
|
||||
"typesPublisherContentHash": "43019f2af91c7a4ca3453c4b806a01c521ca3008ffe1bfefd37c5f9d6135660e",
|
||||
"typeScriptVersion": "2.0"
|
||||
}
|
||||
21
node_modules/@types/minimatch/LICENSE
generated
vendored
21
node_modules/@types/minimatch/LICENSE
generated
vendored
@@ -1,21 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE
|
||||
16
node_modules/@types/minimatch/README.md
generated
vendored
16
node_modules/@types/minimatch/README.md
generated
vendored
@@ -1,16 +0,0 @@
|
||||
# Installation
|
||||
> `npm install --save @types/minimatch`
|
||||
|
||||
# Summary
|
||||
This package contains type definitions for Minimatch (https://github.com/isaacs/minimatch).
|
||||
|
||||
# Details
|
||||
Files were exported from https://www.github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/minimatch
|
||||
|
||||
Additional Details
|
||||
* Last updated: Thu, 04 Jan 2018 23:26:01 GMT
|
||||
* Dependencies: none
|
||||
* Global values: none
|
||||
|
||||
# Credits
|
||||
These definitions were written by vvakame <https://github.com/vvakame>, Shant Marouti <https://github.com/shantmarouti>.
|
||||
214
node_modules/@types/minimatch/index.d.ts
generated
vendored
214
node_modules/@types/minimatch/index.d.ts
generated
vendored
@@ -1,214 +0,0 @@
|
||||
// Type definitions for Minimatch 3.0
|
||||
// Project: https://github.com/isaacs/minimatch
|
||||
// Definitions by: vvakame <https://github.com/vvakame>
|
||||
// Shant Marouti <https://github.com/shantmarouti>
|
||||
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||
|
||||
/**
|
||||
* Tests a path against the pattern using the options.
|
||||
*/
|
||||
declare function M(target: string, pattern: string, options?: M.IOptions): boolean;
|
||||
|
||||
declare namespace M {
|
||||
/**
|
||||
* Match against the list of files, in the style of fnmatch or glob.
|
||||
* If nothing is matched, and options.nonull is set,
|
||||
* then return a list containing the pattern itself.
|
||||
*/
|
||||
function match(list: ReadonlyArray<string>, pattern: string, options?: IOptions): string[];
|
||||
|
||||
/**
|
||||
* Returns a function that tests its supplied argument, suitable for use with Array.filter
|
||||
*/
|
||||
function filter(pattern: string, options?: IOptions): (element: string, indexed: number, array: ReadonlyArray<string>) => boolean;
|
||||
|
||||
/**
|
||||
* Make a regular expression object from the pattern.
|
||||
*/
|
||||
function makeRe(pattern: string, options?: IOptions): RegExp;
|
||||
|
||||
let Minimatch: IMinimatchStatic;
|
||||
|
||||
interface IOptions {
|
||||
/**
|
||||
* Dump a ton of stuff to stderr.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
debug?: boolean;
|
||||
|
||||
/**
|
||||
* Do not expand {a,b} and {1..3} brace sets.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
nobrace?: boolean;
|
||||
|
||||
/**
|
||||
* Disable ** matching against multiple folder names.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
noglobstar?: boolean;
|
||||
|
||||
/**
|
||||
* Allow patterns to match filenames starting with a period,
|
||||
* even if the pattern does not explicitly have a period in that spot.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
dot?: boolean;
|
||||
|
||||
/**
|
||||
* Disable "extglob" style patterns like +(a|b).
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
noext?: boolean;
|
||||
|
||||
/**
|
||||
* Perform a case-insensitive match.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
nocase?: boolean;
|
||||
|
||||
/**
|
||||
* When a match is not found by minimatch.match,
|
||||
* return a list containing the pattern itself if this option is set.
|
||||
* Otherwise, an empty list is returned if there are no matches.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
nonull?: boolean;
|
||||
|
||||
/**
|
||||
* If set, then patterns without slashes will be matched against
|
||||
* the basename of the path if it contains slashes.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
matchBase?: boolean;
|
||||
|
||||
/**
|
||||
* Suppress the behavior of treating #
|
||||
* at the start of a pattern as a comment.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
nocomment?: boolean;
|
||||
|
||||
/**
|
||||
* Suppress the behavior of treating a leading ! character as negation.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
nonegate?: boolean;
|
||||
|
||||
/**
|
||||
* Returns from negate expressions the same as if they were not negated.
|
||||
* (Ie, true on a hit, false on a miss.)
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
flipNegate?: boolean;
|
||||
}
|
||||
|
||||
interface IMinimatchStatic {
|
||||
new(pattern: string, options?: IOptions): IMinimatch;
|
||||
prototype: IMinimatch;
|
||||
}
|
||||
|
||||
interface IMinimatch {
|
||||
/**
|
||||
* The original pattern the minimatch object represents.
|
||||
*/
|
||||
pattern: string;
|
||||
|
||||
/**
|
||||
* The options supplied to the constructor.
|
||||
*/
|
||||
options: IOptions;
|
||||
|
||||
/**
|
||||
* A 2-dimensional array of regexp or string expressions.
|
||||
*/
|
||||
set: any[][]; // (RegExp | string)[][]
|
||||
|
||||
/**
|
||||
* A single regular expression expressing the entire pattern.
|
||||
* Created by the makeRe method.
|
||||
*/
|
||||
regexp: RegExp;
|
||||
|
||||
/**
|
||||
* True if the pattern is negated.
|
||||
*/
|
||||
negate: boolean;
|
||||
|
||||
/**
|
||||
* True if the pattern is a comment.
|
||||
*/
|
||||
comment: boolean;
|
||||
|
||||
/**
|
||||
* True if the pattern is ""
|
||||
*/
|
||||
empty: boolean;
|
||||
|
||||
/**
|
||||
* Generate the regexp member if necessary, and return it.
|
||||
* Will return false if the pattern is invalid.
|
||||
*/
|
||||
makeRe(): RegExp; // regexp or boolean
|
||||
|
||||
/**
|
||||
* Return true if the filename matches the pattern, or false otherwise.
|
||||
*/
|
||||
match(fname: string): boolean;
|
||||
|
||||
/**
|
||||
* Take a /-split filename, and match it against a single row in the regExpSet.
|
||||
* This method is mainly for internal use, but is exposed so that it can be used
|
||||
* by a glob-walker that needs to avoid excessive filesystem calls.
|
||||
*/
|
||||
matchOne(files: string[], pattern: string[], partial: boolean): boolean;
|
||||
|
||||
/**
|
||||
* Deprecated. For internal use.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
debug(): void;
|
||||
|
||||
/**
|
||||
* Deprecated. For internal use.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
make(): void;
|
||||
|
||||
/**
|
||||
* Deprecated. For internal use.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
parseNegate(): void;
|
||||
|
||||
/**
|
||||
* Deprecated. For internal use.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
braceExpand(pattern: string, options: IOptions): void;
|
||||
|
||||
/**
|
||||
* Deprecated. For internal use.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
parse(pattern: string, isSub?: boolean): void;
|
||||
}
|
||||
}
|
||||
|
||||
export = M;
|
||||
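The index.d.ts removed above declares the whole minimatch 3.x surface: the top-level matcher `M()`, the `match`, `filter`, and `makeRe` helpers, the `IOptions` flags, and the `Minimatch` class. As a quick orientation, here is a minimal consumer-side sketch of those typings; it assumes `minimatch@3` and this `@types/minimatch` package are installed and is not part of the diff itself.

```ts
import minimatch = require("minimatch");

// Top-level call (the `declare function M(...)` above): test one path against a glob.
const isSource: boolean = minimatch("src/util/glob.ts", "src/**/*.ts", { dot: false });

// M.match: filter a list of paths, in the style of fnmatch or glob.
const tsFiles: string[] = minimatch.match(["a.js", "b.ts", "c.ts"], "*.ts");

// M.filter: returns a predicate suitable for Array.prototype.filter.
const filtered = ["README.md", "index.ts"].filter(minimatch.filter("*.ts", { nocase: true }));

// M.makeRe: compile the glob to a RegExp once and reuse it.
const mdPattern: RegExp = minimatch.makeRe("*.md");

// The Minimatch class exposes the parsed state (set, regexp, negate, ...).
const mm = new minimatch.Minimatch("node_modules/**", { dot: true });
const inNodeModules: boolean = mm.match("node_modules/foo/index.js");

console.log(isSource, tsFiles, filtered, mdPattern.test("CHANGELOG.md"), inNodeModules);
```

Reusing a compiled `RegExp` from `makeRe`, or a `Minimatch` instance, avoids re-parsing the glob on every call, which is why the class is exported alongside the plain function.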
27 node_modules/@types/minimatch/package.json (generated, vendored)
@@ -1,27 +0,0 @@
{
    "name": "@types/minimatch",
    "version": "3.0.3",
    "description": "TypeScript definitions for Minimatch",
    "license": "MIT",
    "contributors": [
        {
            "name": "vvakame",
            "url": "https://github.com/vvakame",
            "githubUsername": "vvakame"
        },
        {
            "name": "Shant Marouti",
            "url": "https://github.com/shantmarouti",
            "githubUsername": "shantmarouti"
        }
    ],
    "main": "",
    "repository": {
        "type": "git",
        "url": "https://www.github.com/DefinitelyTyped/DefinitelyTyped.git"
    },
    "scripts": {},
    "dependencies": {},
    "typesPublisherContentHash": "e768e36348874adcc93ac67e9c3c7b5fcbd39079c0610ec16e410b8f851308d1",
    "typeScriptVersion": "2.0"
}
0 node_modules/@types/node/LICENSE (generated, vendored, Executable file → Normal file)
8 node_modules/@types/node/README.md (generated, vendored, Executable file → Normal file)
@@ -5,12 +5,12 @@
This package contains type definitions for Node.js (http://nodejs.org/).

# Details
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node.
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node/v12.

### Additional Details
* Last updated: Wed, 28 Jul 2021 19:31:18 GMT
* Last updated: Wed, 21 Oct 2020 17:47:46 GMT
* Dependencies: none
* Global values: `AbortController`, `AbortSignal`, `__dirname`, `__filename`, `console`, `exports`, `gc`, `global`, `module`, `process`, `require`
* Global values: `Buffer`, `NodeJS`, `__dirname`, `__filename`, `clearImmediate`, `clearInterval`, `clearTimeout`, `console`, `exports`, `global`, `module`, `process`, `queueMicrotask`, `require`, `setImmediate`, `setInterval`, `setTimeout`

# Credits
These definitions were written by [Microsoft TypeScript](https://github.com/Microsoft), [DefinitelyTyped](https://github.com/DefinitelyTyped), [Alberto Schiabel](https://github.com/jkomyno), [Alvis HT Tang](https://github.com/alvis), [Andrew Makarov](https://github.com/r3nya), [Benjamin Toueg](https://github.com/btoueg), [Chigozirim C.](https://github.com/smac89), [David Junger](https://github.com/touffy), [Deividas Bakanas](https://github.com/DeividasBakanas), [Eugene Y. Q. Shen](https://github.com/eyqs), [Hannes Magnusson](https://github.com/Hannes-Magnusson-CK), [Hoàng Văn Khải](https://github.com/KSXGitHub), [Huw](https://github.com/hoo29), [Kelvin Jin](https://github.com/kjin), [Klaus Meinhardt](https://github.com/ajafff), [Lishude](https://github.com/islishude), [Mariusz Wiktorczyk](https://github.com/mwiktorczyk), [Mohsen Azimi](https://github.com/mohsen1), [Nicolas Even](https://github.com/n-e), [Nikita Galkin](https://github.com/galkin), [Parambir Singh](https://github.com/parambirs), [Sebastian Silbermann](https://github.com/eps1lon), [Simon Schick](https://github.com/SimonSchick), [Thomas den Hollander](https://github.com/ThomasdenH), [Wilco Bakker](https://github.com/WilcoBakker), [wwwy3y3](https://github.com/wwwy3y3), [Samuel Ainsworth](https://github.com/samuela), [Kyle Uehlein](https://github.com/kuehlein), [Thanik Bhongbhibhat](https://github.com/bhongy), [Marcin Kopacz](https://github.com/chyzwar), [Trivikram Kamat](https://github.com/trivikr), [Minh Son Nguyen](https://github.com/nguymin4), [Junxiao Shi](https://github.com/yoursunny), [Ilia Baryshnikov](https://github.com/qwelias), [ExE Boss](https://github.com/ExE-Boss), [Surasak Chaisurin](https://github.com/Ryan-Willpower), [Piotr Błażejewicz](https://github.com/peterblazejewicz), [Anna Henningsen](https://github.com/addaleax), [Jason Kwok](https://github.com/JasonHK), [Victor Perin](https://github.com/victorperin), [Yongsheng Zhang](https://github.com/ZYSzys), and [NodeJS Contributors](https://github.com/NodeJS).
These definitions were written by [Microsoft TypeScript](https://github.com/Microsoft), [DefinitelyTyped](https://github.com/DefinitelyTyped), [Alberto Schiabel](https://github.com/jkomyno), [Alexander T.](https://github.com/a-tarasyuk), [Alvis HT Tang](https://github.com/alvis), [Andrew Makarov](https://github.com/r3nya), [Benjamin Toueg](https://github.com/btoueg), [Bruno Scheufler](https://github.com/brunoscheufler), [Chigozirim C.](https://github.com/smac89), [David Junger](https://github.com/touffy), [Deividas Bakanas](https://github.com/DeividasBakanas), [Eugene Y. Q. Shen](https://github.com/eyqs), [Flarna](https://github.com/Flarna), [Hannes Magnusson](https://github.com/Hannes-Magnusson-CK), [Hoàng Văn Khải](https://github.com/KSXGitHub), [Huw](https://github.com/hoo29), [Kelvin Jin](https://github.com/kjin), [Klaus Meinhardt](https://github.com/ajafff), [Lishude](https://github.com/islishude), [Mariusz Wiktorczyk](https://github.com/mwiktorczyk), [Mohsen Azimi](https://github.com/mohsen1), [Nicolas Even](https://github.com/n-e), [Nikita Galkin](https://github.com/galkin), [Parambir Singh](https://github.com/parambirs), [Sebastian Silbermann](https://github.com/eps1lon), [Simon Schick](https://github.com/SimonSchick), [Thomas den Hollander](https://github.com/ThomasdenH), [Wilco Bakker](https://github.com/WilcoBakker), [wwwy3y3](https://github.com/wwwy3y3), [Zane Hannan AU](https://github.com/ZaneHannanAU), [Samuel Ainsworth](https://github.com/samuela), [Kyle Uehlein](https://github.com/kuehlein), [Jordi Oliveras Rovira](https://github.com/j-oliveras), [Thanik Bhongbhibhat](https://github.com/bhongy), [Marcin Kopacz](https://github.com/chyzwar), [Trivikram Kamat](https://github.com/trivikr), [Minh Son Nguyen](https://github.com/nguymin4), [Junxiao Shi](https://github.com/yoursunny), [Ilia Baryshnikov](https://github.com/qwelias), [ExE Boss](https://github.com/ExE-Boss), and [Jason Kwok](https://github.com/JasonHK).
1492 node_modules/@types/node/assert.d.ts (generated, vendored, Executable file → Normal file)
File diff suppressed because it is too large
8 node_modules/@types/node/assert/strict.d.ts (generated, vendored)
@@ -1,8 +0,0 @@
declare module 'assert/strict' {
    import { strict } from 'node:assert';
    export = strict;
}
declare module 'node:assert/strict' {
    import { strict } from 'node:assert';
    export = strict;
}
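The two module declarations removed above simply re-export the `strict` namespace of `node:assert` under the `assert/strict` specifier. A minimal sketch of what that wrapper enables for callers, assuming a Node.js version (15+) that ships the `assert/strict` entry point; this example is illustrative and not part of the diff:

```ts
import assert = require("assert/strict");

// With the strict namespace, equal/deepEqual are the strict variants,
// so no legacy loose (==) comparison is involved.
assert.equal(1 + 1, 2);
assert.deepEqual({ a: [1, 2] }, { a: [1, 2] });
console.log("strict assertions passed");
```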
498 node_modules/@types/node/async_hooks.d.ts (generated, vendored, Executable file → Normal file)
@@ -1,47 +1,16 @@
|
||||
/**
|
||||
* The `async_hooks` module provides an API to track asynchronous resources. It
|
||||
* can be accessed using:
|
||||
*
|
||||
* ```js
|
||||
* const async_hooks = require('async_hooks');
|
||||
* ```
|
||||
* @experimental
|
||||
* @see [source](https://github.com/nodejs/node/blob/v16.4.2/lib/async_hooks.js)
|
||||
* Async Hooks module: https://nodejs.org/api/async_hooks.html
|
||||
*/
|
||||
declare module 'async_hooks' {
|
||||
declare module "async_hooks" {
|
||||
/**
|
||||
* ```js
|
||||
* const async_hooks = require('async_hooks');
|
||||
*
|
||||
* console.log(async_hooks.executionAsyncId()); // 1 - bootstrap
|
||||
* fs.open(path, 'r', (err, fd) => {
|
||||
* console.log(async_hooks.executionAsyncId()); // 6 - open()
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* The ID returned from `executionAsyncId()` is related to execution timing, not
|
||||
* causality (which is covered by `triggerAsyncId()`):
|
||||
*
|
||||
* ```js
|
||||
* const server = net.createServer((conn) => {
|
||||
* // Returns the ID of the server, not of the new connection, because the
|
||||
* // callback runs in the execution scope of the server's MakeCallback().
|
||||
* async_hooks.executionAsyncId();
|
||||
*
|
||||
* }).listen(port, () => {
|
||||
* // Returns the ID of a TickObject (process.nextTick()) because all
|
||||
* // callbacks passed to .listen() are wrapped in a nextTick().
|
||||
* async_hooks.executionAsyncId();
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* Promise contexts may not get precise `executionAsyncIds` by default.
|
||||
* See the section on `promise execution tracking`.
|
||||
* @since v8.1.0
|
||||
* @return The `asyncId` of the current execution context. Useful to track when something calls.
|
||||
* Returns the asyncId of the current execution context.
|
||||
*/
|
||||
function executionAsyncId(): number;
|
||||
|
||||
/**
|
||||
* The resource representing the current execution.
|
||||
* Useful to store data within the resource.
|
||||
*
|
||||
* Resource objects returned by `executionAsyncResource()` are most often internal
|
||||
* Node.js handle objects with undocumented APIs. Using any functions or properties
|
||||
* on the object is likely to crash your application and should be avoided.
|
||||
@@ -49,70 +18,14 @@ declare module 'async_hooks' {
|
||||
* Using `executionAsyncResource()` in the top-level execution context will
|
||||
* return an empty object as there is no handle or request object to use,
|
||||
* but having an object representing the top-level can be helpful.
|
||||
*
|
||||
* ```js
|
||||
* const { open } = require('fs');
|
||||
* const { executionAsyncId, executionAsyncResource } = require('async_hooks');
|
||||
*
|
||||
* console.log(executionAsyncId(), executionAsyncResource()); // 1 {}
|
||||
* open(__filename, 'r', (err, fd) => {
|
||||
* console.log(executionAsyncId(), executionAsyncResource()); // 7 FSReqWrap
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* This can be used to implement continuation local storage without the
|
||||
* use of a tracking `Map` to store the metadata:
|
||||
*
|
||||
* ```js
|
||||
* const { createServer } = require('http');
|
||||
* const {
|
||||
* executionAsyncId,
|
||||
* executionAsyncResource,
|
||||
* createHook
|
||||
* } = require('async_hooks');
|
||||
* const sym = Symbol('state'); // Private symbol to avoid pollution
|
||||
*
|
||||
* createHook({
|
||||
* init(asyncId, type, triggerAsyncId, resource) {
|
||||
* const cr = executionAsyncResource();
|
||||
* if (cr) {
|
||||
* resource[sym] = cr[sym];
|
||||
* }
|
||||
* }
|
||||
* }).enable();
|
||||
*
|
||||
* const server = createServer((req, res) => {
|
||||
* executionAsyncResource()[sym] = { state: req.url };
|
||||
* setTimeout(function() {
|
||||
* res.end(JSON.stringify(executionAsyncResource()[sym]));
|
||||
* }, 100);
|
||||
* }).listen(3000);
|
||||
* ```
|
||||
* @since v13.9.0, v12.17.0
|
||||
* @return The resource representing the current execution. Useful to store data within the resource.
|
||||
*/
|
||||
function executionAsyncResource(): object;
|
||||
|
||||
/**
|
||||
* ```js
|
||||
* const server = net.createServer((conn) => {
|
||||
* // The resource that caused (or triggered) this callback to be called
|
||||
* // was that of the new connection. Thus the return value of triggerAsyncId()
|
||||
* // is the asyncId of "conn".
|
||||
* async_hooks.triggerAsyncId();
|
||||
*
|
||||
* }).listen(port, () => {
|
||||
* // Even though all callbacks passed to .listen() are wrapped in a nextTick()
|
||||
* // the callback itself exists because the call to the server's .listen()
|
||||
* // was made. So the return value would be the ID of the server.
|
||||
* async_hooks.triggerAsyncId();
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* Promise contexts may not get valid `triggerAsyncId`s by default. See
|
||||
* the section on `promise execution tracking`.
|
||||
* @return The ID of the resource responsible for calling the callback that is currently being executed.
|
||||
* Returns the ID of the resource responsible for calling the callback that is currently being executed.
|
||||
*/
|
||||
function triggerAsyncId(): number;
|
||||
|
||||
interface HookCallbacks {
|
||||
/**
|
||||
* Called when a class is constructed that has the possibility to emit an asynchronous event.
|
||||
@@ -122,133 +35,73 @@ declare module 'async_hooks' {
|
||||
* @param resource reference to the resource representing the async operation, needs to be released during destroy
|
||||
*/
|
||||
init?(asyncId: number, type: string, triggerAsyncId: number, resource: object): void;
|
||||
|
||||
/**
|
||||
* When an asynchronous operation is initiated or completes a callback is called to notify the user.
|
||||
* The before callback is called just before said callback is executed.
|
||||
* @param asyncId the unique identifier assigned to the resource about to execute the callback.
|
||||
*/
|
||||
before?(asyncId: number): void;
|
||||
|
||||
/**
|
||||
* Called immediately after the callback specified in before is completed.
|
||||
* @param asyncId the unique identifier assigned to the resource which has executed the callback.
|
||||
*/
|
||||
after?(asyncId: number): void;
|
||||
|
||||
/**
|
||||
* Called when a promise has resolve() called. This may not be in the same execution id
|
||||
* as the promise itself.
|
||||
* @param asyncId the unique id for the promise that was resolve()d.
|
||||
*/
|
||||
promiseResolve?(asyncId: number): void;
|
||||
|
||||
/**
|
||||
* Called after the resource corresponding to asyncId is destroyed
|
||||
* @param asyncId a unique ID for the async resource
|
||||
*/
|
||||
destroy?(asyncId: number): void;
|
||||
}
|
||||
|
||||
interface AsyncHook {
|
||||
/**
|
||||
* Enable the callbacks for a given AsyncHook instance. If no callbacks are provided enabling is a noop.
|
||||
*/
|
||||
enable(): this;
|
||||
|
||||
/**
|
||||
* Disable the callbacks for a given AsyncHook instance from the global pool of AsyncHook callbacks to be executed. Once a hook has been disabled it will not be called again until enabled.
|
||||
*/
|
||||
disable(): this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers functions to be called for different lifetime events of each async
|
||||
* operation.
|
||||
*
|
||||
* The callbacks `init()`/`before()`/`after()`/`destroy()` are called for the
|
||||
* respective asynchronous event during a resource's lifetime.
|
||||
*
|
||||
* All callbacks are optional. For example, if only resource cleanup needs to
|
||||
* be tracked, then only the `destroy` callback needs to be passed. The
|
||||
* specifics of all functions that can be passed to `callbacks` is in the `Hook Callbacks` section.
|
||||
*
|
||||
* ```js
|
||||
* const async_hooks = require('async_hooks');
|
||||
*
|
||||
* const asyncHook = async_hooks.createHook({
|
||||
* init(asyncId, type, triggerAsyncId, resource) { },
|
||||
* destroy(asyncId) { }
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* The callbacks will be inherited via the prototype chain:
|
||||
*
|
||||
* ```js
|
||||
* class MyAsyncCallbacks {
|
||||
* init(asyncId, type, triggerAsyncId, resource) { }
|
||||
* destroy(asyncId) {}
|
||||
* }
|
||||
*
|
||||
* class MyAddedCallbacks extends MyAsyncCallbacks {
|
||||
* before(asyncId) { }
|
||||
* after(asyncId) { }
|
||||
* }
|
||||
*
|
||||
* const asyncHook = async_hooks.createHook(new MyAddedCallbacks());
|
||||
* ```
|
||||
*
|
||||
* Because promises are asynchronous resources whose lifecycle is tracked
|
||||
* via the async hooks mechanism, the `init()`, `before()`, `after()`, and`destroy()` callbacks _must not_ be async functions that return promises.
|
||||
* @since v8.1.0
|
||||
* @param callbacks The `Hook Callbacks` to register
|
||||
* @return Instance used for disabling and enabling hooks
|
||||
* Registers functions to be called for different lifetime events of each async operation.
|
||||
* @param options the callbacks to register
|
||||
* @return an AsyncHooks instance used for disabling and enabling hooks
|
||||
*/
|
||||
function createHook(callbacks: HookCallbacks): AsyncHook;
|
||||
function createHook(options: HookCallbacks): AsyncHook;
|
||||
|
||||
interface AsyncResourceOptions {
|
||||
/**
|
||||
* The ID of the execution context that created this async event.
|
||||
* @default executionAsyncId()
|
||||
*/
|
||||
triggerAsyncId?: number | undefined;
|
||||
/**
|
||||
* Disables automatic `emitDestroy` when the object is garbage collected.
|
||||
* This usually does not need to be set (even if `emitDestroy` is called
|
||||
* manually), unless the resource's `asyncId` is retrieved and the
|
||||
* sensitive API's `emitDestroy` is called with it.
|
||||
* @default false
|
||||
*/
|
||||
requireManualDestroy?: boolean | undefined;
|
||||
/**
|
||||
* The ID of the execution context that created this async event.
|
||||
* Default: `executionAsyncId()`
|
||||
*/
|
||||
triggerAsyncId?: number;
|
||||
|
||||
/**
|
||||
* Disables automatic `emitDestroy` when the object is garbage collected.
|
||||
* This usually does not need to be set (even if `emitDestroy` is called
|
||||
* manually), unless the resource's `asyncId` is retrieved and the
|
||||
* sensitive API's `emitDestroy` is called with it.
|
||||
* Default: `false`
|
||||
*/
|
||||
requireManualDestroy?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* The class `AsyncResource` is designed to be extended by the embedder's async
|
||||
* resources. Using this, users can easily trigger the lifetime events of their
|
||||
* own resources.
|
||||
*
|
||||
* The `init` hook will trigger when an `AsyncResource` is instantiated.
|
||||
*
|
||||
* The following is an overview of the `AsyncResource` API.
|
||||
*
|
||||
* ```js
|
||||
* const { AsyncResource, executionAsyncId } = require('async_hooks');
|
||||
*
|
||||
* // AsyncResource() is meant to be extended. Instantiating a
|
||||
* // new AsyncResource() also triggers init. If triggerAsyncId is omitted then
|
||||
* // async_hook.executionAsyncId() is used.
|
||||
* const asyncResource = new AsyncResource(
|
||||
* type, { triggerAsyncId: executionAsyncId(), requireManualDestroy: false }
|
||||
* );
|
||||
*
|
||||
* // Run a function in the execution context of the resource. This will
|
||||
* // * establish the context of the resource
|
||||
* // * trigger the AsyncHooks before callbacks
|
||||
* // * call the provided function `fn` with the supplied arguments
|
||||
* // * trigger the AsyncHooks after callbacks
|
||||
* // * restore the original execution context
|
||||
* asyncResource.runInAsyncScope(fn, thisArg, ...args);
|
||||
*
|
||||
* // Call AsyncHooks destroy callbacks.
|
||||
* asyncResource.emitDestroy();
|
||||
*
|
||||
* // Return the unique ID assigned to the AsyncResource instance.
|
||||
* asyncResource.asyncId();
|
||||
*
|
||||
* // Return the trigger ID for the AsyncResource instance.
|
||||
* asyncResource.triggerAsyncId();
|
||||
* ```
|
||||
* The class AsyncResource was designed to be extended by the embedder's async resources.
|
||||
* Using this users can easily trigger the lifetime events of their own resources.
|
||||
*/
|
||||
class AsyncResource {
|
||||
/**
|
||||
@@ -260,238 +113,135 @@ declare module 'async_hooks' {
|
||||
* this async event (default: `executionAsyncId()`), or an
|
||||
* AsyncResourceOptions object (since 9.3)
|
||||
*/
|
||||
constructor(type: string, triggerAsyncId?: number | AsyncResourceOptions);
|
||||
constructor(type: string, triggerAsyncId?: number|AsyncResourceOptions);
|
||||
|
||||
/**
|
||||
* Binds the given function to the current execution context.
|
||||
*
|
||||
* The returned function will have an `asyncResource` property referencing
|
||||
* the `AsyncResource` to which the function is bound.
|
||||
* @since v14.8.0, v12.19.0
|
||||
* @param fn The function to bind to the current execution context.
|
||||
* @param type An optional name to associate with the underlying `AsyncResource`.
|
||||
*/
|
||||
static bind<Func extends (this: ThisArg, ...args: any[]) => any, ThisArg>(
|
||||
fn: Func,
|
||||
type?: string,
|
||||
thisArg?: ThisArg
|
||||
): Func & {
|
||||
asyncResource: AsyncResource;
|
||||
};
|
||||
/**
|
||||
* Binds the given function to execute to this `AsyncResource`'s scope.
|
||||
*
|
||||
* The returned function will have an `asyncResource` property referencing
|
||||
* the `AsyncResource` to which the function is bound.
|
||||
* @since v14.8.0, v12.19.0
|
||||
* @param fn The function to bind to the current `AsyncResource`.
|
||||
*/
|
||||
bind<Func extends (...args: any[]) => any>(
|
||||
fn: Func
|
||||
): Func & {
|
||||
asyncResource: AsyncResource;
|
||||
};
|
||||
/**
|
||||
* Call the provided function with the provided arguments in the execution context
|
||||
* of the async resource. This will establish the context, trigger the AsyncHooks
|
||||
* before callbacks, call the function, trigger the AsyncHooks after callbacks, and
|
||||
* then restore the original execution context.
|
||||
* @since v9.6.0
|
||||
* @param fn The function to call in the execution context of this async resource.
|
||||
* Call the provided function with the provided arguments in the
|
||||
* execution context of the async resource. This will establish the
|
||||
* context, trigger the AsyncHooks before callbacks, call the function,
|
||||
* trigger the AsyncHooks after callbacks, and then restore the original
|
||||
* execution context.
|
||||
* @param fn The function to call in the execution context of this
|
||||
* async resource.
|
||||
* @param thisArg The receiver to be used for the function call.
|
||||
* @param args Optional arguments to pass to the function.
|
||||
*/
|
||||
runInAsyncScope<This, Result>(fn: (this: This, ...args: any[]) => Result, thisArg?: This, ...args: any[]): Result;
|
||||
|
||||
/**
|
||||
* Call all `destroy` hooks. This should only ever be called once. An error will
|
||||
* be thrown if it is called more than once. This **must** be manually called. If
|
||||
* the resource is left to be collected by the GC then the `destroy` hooks will
|
||||
* never be called.
|
||||
* @return A reference to `asyncResource`.
|
||||
* Call AsyncHooks destroy callbacks.
|
||||
*/
|
||||
emitDestroy(): this;
|
||||
emitDestroy(): void;
|
||||
|
||||
/**
|
||||
* @return The unique `asyncId` assigned to the resource.
|
||||
* @return the unique ID assigned to this AsyncResource instance.
|
||||
*/
|
||||
asyncId(): number;
|
||||
|
||||
/**
|
||||
*
|
||||
* @return The same `triggerAsyncId` that is passed to the `AsyncResource` constructor.
|
||||
* @return the trigger ID for this AsyncResource instance.
|
||||
*/
|
||||
triggerAsyncId(): number;
|
||||
}
|
||||
|
||||
/**
|
||||
* This class creates stores that stay coherent through asynchronous operations.
|
||||
*
|
||||
* While you can create your own implementation on top of the `async_hooks` module,`AsyncLocalStorage` should be preferred as it is a performant and memory safe
|
||||
* implementation that involves significant optimizations that are non-obvious to
|
||||
* implement.
|
||||
*
|
||||
* The following example uses `AsyncLocalStorage` to build a simple logger
|
||||
* that assigns IDs to incoming HTTP requests and includes them in messages
|
||||
* logged within each request.
|
||||
*
|
||||
* ```js
|
||||
* const http = require('http');
|
||||
* const { AsyncLocalStorage } = require('async_hooks');
|
||||
*
|
||||
* const asyncLocalStorage = new AsyncLocalStorage();
|
||||
*
|
||||
* function logWithId(msg) {
|
||||
* const id = asyncLocalStorage.getStore();
|
||||
* console.log(`${id !== undefined ? id : '-'}:`, msg);
|
||||
* }
|
||||
*
|
||||
* let idSeq = 0;
|
||||
* http.createServer((req, res) => {
|
||||
* asyncLocalStorage.run(idSeq++, () => {
|
||||
* logWithId('start');
|
||||
* // Imagine any chain of async operations here
|
||||
* setImmediate(() => {
|
||||
* logWithId('finish');
|
||||
* res.end();
|
||||
* });
|
||||
* });
|
||||
* }).listen(8080);
|
||||
*
|
||||
* http.get('http://localhost:8080');
|
||||
* http.get('http://localhost:8080');
|
||||
* // Prints:
|
||||
* // 0: start
|
||||
* // 1: start
|
||||
* // 0: finish
|
||||
* // 1: finish
|
||||
* ```
|
||||
*
|
||||
* Each instance of `AsyncLocalStorage` maintains an independent storage context.
|
||||
* Multiple instances can safely exist simultaneously without risk of interfering
|
||||
* with each other data.
|
||||
* @since v13.10.0, v12.17.0
|
||||
* When having multiple instances of `AsyncLocalStorage`, they are independent
|
||||
* from each other. It is safe to instantiate this class multiple times.
|
||||
*/
|
||||
class AsyncLocalStorage<T> {
|
||||
/**
|
||||
* Disables the instance of `AsyncLocalStorage`. All subsequent calls
|
||||
* to `asyncLocalStorage.getStore()` will return `undefined` until`asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()` is called again.
|
||||
* This method disables the instance of `AsyncLocalStorage`. All subsequent calls
|
||||
* to `asyncLocalStorage.getStore()` will return `undefined` until
|
||||
* `asyncLocalStorage.run()` or `asyncLocalStorage.runSyncAndReturn()`
|
||||
* is called again.
|
||||
*
|
||||
* When calling `asyncLocalStorage.disable()`, all current contexts linked to the
|
||||
* instance will be exited.
|
||||
*
|
||||
* Calling `asyncLocalStorage.disable()` is required before the`asyncLocalStorage` can be garbage collected. This does not apply to stores
|
||||
* Calling `asyncLocalStorage.disable()` is required before the
|
||||
* `asyncLocalStorage` can be garbage collected. This does not apply to stores
|
||||
* provided by the `asyncLocalStorage`, as those objects are garbage collected
|
||||
* along with the corresponding async resources.
|
||||
*
|
||||
* Use this method when the `asyncLocalStorage` is not in use anymore
|
||||
* This method is to be used when the `asyncLocalStorage` is not in use anymore
|
||||
* in the current process.
|
||||
* @since v13.10.0, v12.17.0
|
||||
* @experimental
|
||||
*/
|
||||
disable(): void;
|
||||
|
||||
/**
|
||||
* Returns the current store.
|
||||
* If called outside of an asynchronous context initialized by
|
||||
* calling `asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()`, it
|
||||
* returns `undefined`.
|
||||
* @since v13.10.0, v12.17.0
|
||||
* This method returns the current store.
|
||||
* If this method is called outside of an asynchronous context initialized by
|
||||
* calling `asyncLocalStorage.run` or `asyncLocalStorage.runAndReturn`, it will
|
||||
* return `undefined`.
|
||||
*/
|
||||
getStore(): T | undefined;
|
||||
|
||||
/**
|
||||
* Runs a function synchronously within a context and returns its
|
||||
* Calling `asyncLocalStorage.run(callback)` will create a new asynchronous
|
||||
* context.
|
||||
* Within the callback function and the asynchronous operations from the callback,
|
||||
* `asyncLocalStorage.getStore()` will return an instance of `Map` known as
|
||||
* "the store". This store will be persistent through the following
|
||||
* asynchronous calls.
|
||||
*
|
||||
* The callback will be ran asynchronously. Optionally, arguments can be passed
|
||||
* to the function. They will be passed to the callback function.
|
||||
*
|
||||
* If an error is thrown by the callback function, it will not be caught by
|
||||
* a `try/catch` block as the callback is ran in a new asynchronous resource.
|
||||
* Also, the stacktrace will be impacted by the asynchronous call.
|
||||
*/
|
||||
// TODO: Apply generic vararg once available
|
||||
run(store: T, callback: (...args: any[]) => void, ...args: any[]): void;
|
||||
|
||||
/**
|
||||
* Calling `asyncLocalStorage.exit(callback)` will create a new asynchronous
|
||||
* context.
|
||||
* Within the callback function and the asynchronous operations from the callback,
|
||||
* `asyncLocalStorage.getStore()` will return `undefined`.
|
||||
*
|
||||
* The callback will be ran asynchronously. Optionally, arguments can be passed
|
||||
* to the function. They will be passed to the callback function.
|
||||
*
|
||||
* If an error is thrown by the callback function, it will not be caught by
|
||||
* a `try/catch` block as the callback is ran in a new asynchronous resource.
|
||||
* Also, the stacktrace will be impacted by the asynchronous call.
|
||||
*/
|
||||
exit(callback: (...args: any[]) => void, ...args: any[]): void;
|
||||
|
||||
/**
|
||||
* This methods runs a function synchronously within a context and return its
|
||||
* return value. The store is not accessible outside of the callback function or
|
||||
* the asynchronous operations created within the callback.
|
||||
*
|
||||
* The optional `args` are passed to the callback function.
|
||||
* Optionally, arguments can be passed to the function. They will be passed to
|
||||
* the callback function.
|
||||
*
|
||||
* If the callback function throws an error, the error is thrown by `run()` too.
|
||||
* The stacktrace is not impacted by this call and the context is exited.
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* ```js
|
||||
* const store = { id: 2 };
|
||||
* try {
|
||||
* asyncLocalStorage.run(store, () => {
|
||||
* asyncLocalStorage.getStore(); // Returns the store object
|
||||
* throw new Error();
|
||||
* });
|
||||
* } catch (e) {
|
||||
* asyncLocalStorage.getStore(); // Returns undefined
|
||||
* // The error will be caught here
|
||||
* }
|
||||
* ```
|
||||
* @since v13.10.0, v12.17.0
|
||||
* If the callback function throws an error, it will be thrown by
|
||||
* `runSyncAndReturn` too. The stacktrace will not be impacted by this call and
|
||||
* the context will be exited.
|
||||
*/
|
||||
run<R, TArgs extends any[]>(store: T, callback: (...args: TArgs) => R, ...args: TArgs): R;
|
||||
runSyncAndReturn<R>(store: T, callback: (...args: any[]) => R, ...args: any[]): R;
|
||||
|
||||
/**
|
||||
* Runs a function synchronously outside of a context and returns its
|
||||
* This methods runs a function synchronously outside of a context and return its
|
||||
* return value. The store is not accessible within the callback function or
|
||||
* the asynchronous operations created within the callback. Any `getStore()`call done within the callback function will always return `undefined`.
|
||||
* the asynchronous operations created within the callback.
|
||||
*
|
||||
* The optional `args` are passed to the callback function.
|
||||
* Optionally, arguments can be passed to the function. They will be passed to
|
||||
* the callback function.
|
||||
*
|
||||
* If the callback function throws an error, the error is thrown by `exit()` too.
|
||||
* The stacktrace is not impacted by this call and the context is re-entered.
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* ```js
|
||||
* // Within a call to run
|
||||
* try {
|
||||
* asyncLocalStorage.getStore(); // Returns the store object or value
|
||||
* asyncLocalStorage.exit(() => {
|
||||
* asyncLocalStorage.getStore(); // Returns undefined
|
||||
* throw new Error();
|
||||
* });
|
||||
* } catch (e) {
|
||||
* asyncLocalStorage.getStore(); // Returns the same object or value
|
||||
* // The error will be caught here
|
||||
* }
|
||||
* ```
|
||||
* @since v13.10.0, v12.17.0
|
||||
* @experimental
|
||||
* If the callback function throws an error, it will be thrown by
|
||||
* `exitSyncAndReturn` too. The stacktrace will not be impacted by this call and
|
||||
* the context will be re-entered.
|
||||
*/
|
||||
exit<R, TArgs extends any[]>(callback: (...args: TArgs) => R, ...args: TArgs): R;
|
||||
exitSyncAndReturn<R>(callback: (...args: any[]) => R, ...args: any[]): R;
|
||||
|
||||
/**
|
||||
* Transitions into the context for the remainder of the current
|
||||
* synchronous execution and then persists the store through any following
|
||||
* asynchronous calls.
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* ```js
|
||||
* const store = { id: 1 };
|
||||
* // Replaces previous store with the given store object
|
||||
* asyncLocalStorage.enterWith(store);
|
||||
* asyncLocalStorage.getStore(); // Returns the store object
|
||||
* someAsyncOperation(() => {
|
||||
* asyncLocalStorage.getStore(); // Returns the same object
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* This transition will continue for the _entire_ synchronous execution.
|
||||
* This means that if, for example, the context is entered within an event
|
||||
* handler subsequent event handlers will also run within that context unless
|
||||
* specifically bound to another context with an `AsyncResource`. That is why`run()` should be preferred over `enterWith()` unless there are strong reasons
|
||||
* to use the latter method.
|
||||
*
|
||||
* ```js
|
||||
* const store = { id: 1 };
|
||||
*
|
||||
* emitter.on('my-event', () => {
|
||||
* asyncLocalStorage.enterWith(store);
|
||||
* });
|
||||
* emitter.on('my-event', () => {
|
||||
* asyncLocalStorage.getStore(); // Returns the same object
|
||||
* });
|
||||
*
|
||||
* asyncLocalStorage.getStore(); // Returns undefined
|
||||
* emitter.emit('my-event');
|
||||
* asyncLocalStorage.getStore(); // Returns the same object
|
||||
* ```
|
||||
* @since v13.11.0, v12.17.0
|
||||
* @experimental
|
||||
* Calling `asyncLocalStorage.enterWith(store)` will transition into the context
|
||||
* for the remainder of the current synchronous execution and will persist
|
||||
* through any following asynchronous calls.
|
||||
*/
|
||||
enterWith(store: T): void;
|
||||
}
|
||||
}
|
||||
declare module 'node:async_hooks' {
|
||||
export * from 'async_hooks';
|
||||
}
|
||||
|
||||
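Most of the async_hooks.d.ts text above is documentation for `createHook`, `AsyncResource`, and `AsyncLocalStorage`. The request-id logger that the `AsyncLocalStorage` doc comment walks through can be sketched as a standalone TypeScript file like this; the port and id scheme are illustrative, not taken from the diff:

```ts
import { createServer } from "http";
import { AsyncLocalStorage } from "async_hooks";

// One store instance; each als.run() call below opens an independent context.
const als = new AsyncLocalStorage<number>();

function logWithId(msg: string): void {
  // getStore() returns the value passed to run() for the current async context,
  // or undefined when called outside of any run().
  const id = als.getStore();
  console.log(`${id ?? "-"}:`, msg);
}

let idSeq = 0;
createServer((_req, res) => {
  als.run(idSeq++, () => {
    logWithId("start");
    setImmediate(() => {
      // Still sees the same id even though this runs on a later tick.
      logWithId("finish");
      res.end();
    });
  });
}).listen(8080);
```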
1 node_modules/@types/node/base.d.ts (generated, vendored, Executable file → Normal file)
@@ -13,6 +13,7 @@
/// <reference lib="esnext.bigint" />

// Base definitions for all NodeJS modules that are not specific to any version of TypeScript:
// tslint:disable-next-line:no-bad-reference
/// <reference path="ts3.6/base.d.ts" />

// TypeScript 3.7-specific augmentations:

1783 node_modules/@types/node/buffer.d.ts (generated, vendored, Executable file → Normal file)
File diff suppressed because it is too large
1463 node_modules/@types/node/child_process.d.ts (generated, vendored, Executable file → Normal file)
File diff suppressed because it is too large
568 node_modules/@types/node/cluster.d.ts (generated, vendored, Executable file → Normal file)
@@ -1,272 +1,37 @@
|
||||
/**
|
||||
* A single instance of Node.js runs in a single thread. To take advantage of
|
||||
* multi-core systems, the user will sometimes want to launch a cluster of Node.js
|
||||
* processes to handle the load.
|
||||
*
|
||||
* The cluster module allows easy creation of child processes that all share
|
||||
* server ports.
|
||||
*
|
||||
* ```js
|
||||
* const cluster = require('cluster');
|
||||
* const http = require('http');
|
||||
* const numCPUs = require('os').cpus().length;
|
||||
*
|
||||
* if (cluster.isPrimary) {
|
||||
* console.log(`Primary ${process.pid} is running`);
|
||||
*
|
||||
* // Fork workers.
|
||||
* for (let i = 0; i < numCPUs; i++) {
|
||||
* cluster.fork();
|
||||
* }
|
||||
*
|
||||
* cluster.on('exit', (worker, code, signal) => {
|
||||
* console.log(`worker ${worker.process.pid} died`);
|
||||
* });
|
||||
* } else {
|
||||
* // Workers can share any TCP connection
|
||||
* // In this case it is an HTTP server
|
||||
* http.createServer((req, res) => {
|
||||
* res.writeHead(200);
|
||||
* res.end('hello world\n');
|
||||
* }).listen(8000);
|
||||
*
|
||||
* console.log(`Worker ${process.pid} started`);
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* Running Node.js will now share port 8000 between the workers:
|
||||
*
|
||||
* ```console
|
||||
* $ node server.js
|
||||
* Primary 3596 is running
|
||||
* Worker 4324 started
|
||||
* Worker 4520 started
|
||||
* Worker 6056 started
|
||||
* Worker 5644 started
|
||||
* ```
|
||||
*
|
||||
* On Windows, it is not yet possible to set up a named pipe server in a worker.
|
||||
* @see [source](https://github.com/nodejs/node/blob/v16.4.2/lib/cluster.js)
|
||||
*/
|
||||
declare module 'cluster' {
|
||||
import * as child from 'node:child_process';
|
||||
import EventEmitter = require('node:events');
|
||||
import * as net from 'node:net';
|
||||
export interface ClusterSettings {
|
||||
execArgv?: string[] | undefined; // default: process.execArgv
|
||||
exec?: string | undefined;
|
||||
args?: string[] | undefined;
|
||||
silent?: boolean | undefined;
|
||||
stdio?: any[] | undefined;
|
||||
uid?: number | undefined;
|
||||
gid?: number | undefined;
|
||||
inspectPort?: number | (() => number) | undefined;
|
||||
declare module "cluster" {
|
||||
import * as child from "child_process";
|
||||
import * as events from "events";
|
||||
import * as net from "net";
|
||||
|
||||
// interfaces
|
||||
interface ClusterSettings {
|
||||
execArgv?: string[]; // default: process.execArgv
|
||||
exec?: string;
|
||||
args?: string[];
|
||||
silent?: boolean;
|
||||
stdio?: any[];
|
||||
uid?: number;
|
||||
gid?: number;
|
||||
inspectPort?: number | (() => number);
|
||||
}
|
||||
export interface Address {
|
||||
|
||||
interface Address {
|
||||
address: string;
|
||||
port: number;
|
||||
addressType: number | 'udp4' | 'udp6'; // 4, 6, -1, "udp4", "udp6"
|
||||
addressType: number | "udp4" | "udp6"; // 4, 6, -1, "udp4", "udp6"
|
||||
}
|
||||
/**
|
||||
* A `Worker` object contains all public information and method about a worker.
|
||||
* In the primary it can be obtained using `cluster.workers`. In a worker
|
||||
* it can be obtained using `cluster.worker`.
|
||||
* @since v0.7.0
|
||||
*/
|
||||
export class Worker extends EventEmitter {
|
||||
/**
|
||||
* Each new worker is given its own unique id, this id is stored in the`id`.
|
||||
*
|
||||
* While a worker is alive, this is the key that indexes it in`cluster.workers`.
|
||||
* @since v0.8.0
|
||||
*/
|
||||
|
||||
class Worker extends events.EventEmitter {
|
||||
id: number;
|
||||
/**
|
||||
* All workers are created using `child_process.fork()`, the returned object
|
||||
* from this function is stored as `.process`. In a worker, the global `process`is stored.
|
||||
*
|
||||
* See: `Child Process module`.
|
||||
*
|
||||
* Workers will call `process.exit(0)` if the `'disconnect'` event occurs
|
||||
* on `process` and `.exitedAfterDisconnect` is not `true`. This protects against
|
||||
* accidental disconnection.
|
||||
* @since v0.7.0
|
||||
*/
|
||||
process: child.ChildProcess;
|
||||
/**
|
||||
* Send a message to a worker or primary, optionally with a handle.
|
||||
*
|
||||
* In the primary this sends a message to a specific worker. It is identical to `ChildProcess.send()`.
|
||||
*
|
||||
* In a worker this sends a message to the primary. It is identical to`process.send()`.
|
||||
*
|
||||
* This example will echo back all messages from the primary:
|
||||
*
|
||||
* ```js
|
||||
* if (cluster.isPrimary) {
|
||||
* const worker = cluster.fork();
|
||||
* worker.send('hi there');
|
||||
*
|
||||
* } else if (cluster.isWorker) {
|
||||
* process.on('message', (msg) => {
|
||||
* process.send(msg);
|
||||
* });
|
||||
* }
|
||||
* ```
|
||||
* @since v0.7.0
|
||||
* @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles. `options` supports the following properties:
|
||||
*/
|
||||
send(message: child.Serializable, callback?: (error: Error | null) => void): boolean;
|
||||
send(message: child.Serializable, sendHandle: child.SendHandle, callback?: (error: Error | null) => void): boolean;
|
||||
send(message: child.Serializable, sendHandle: child.SendHandle, options?: child.MessageOptions, callback?: (error: Error | null) => void): boolean;
|
||||
/**
|
||||
* This function will kill the worker. In the primary, it does this
|
||||
* by disconnecting the `worker.process`, and once disconnected, killing
|
||||
* with `signal`. In the worker, it does it by disconnecting the channel,
|
||||
* and then exiting with code `0`.
|
||||
*
|
||||
* Because `kill()` attempts to gracefully disconnect the worker process, it is
|
||||
* susceptible to waiting indefinitely for the disconnect to complete. For example,
|
||||
* if the worker enters an infinite loop, a graceful disconnect will never occur.
|
||||
* If the graceful disconnect behavior is not needed, use `worker.process.kill()`.
|
||||
*
|
||||
* Causes `.exitedAfterDisconnect` to be set.
|
||||
*
|
||||
* This method is aliased as `worker.destroy()` for backward compatibility.
|
||||
*
|
||||
* In a worker, `process.kill()` exists, but it is not this function;
|
||||
* it is `kill()`.
|
||||
* @since v0.9.12
|
||||
* @param signal Name of the kill signal to send to the worker process.
|
||||
*/
|
||||
send(message: any, sendHandle?: any, callback?: (error: Error | null) => void): boolean;
|
||||
kill(signal?: string): void;
|
||||
destroy(signal?: string): void;
|
||||
/**
|
||||
* In a worker, this function will close all servers, wait for the `'close'` event
|
||||
* on those servers, and then disconnect the IPC channel.
|
||||
*
|
||||
* In the primary, an internal message is sent to the worker causing it to call`.disconnect()` on itself.
|
||||
*
|
||||
* Causes `.exitedAfterDisconnect` to be set.
|
||||
*
|
||||
* After a server is closed, it will no longer accept new connections,
|
||||
* but connections may be accepted by any other listening worker. Existing
|
||||
* connections will be allowed to close as usual. When no more connections exist,
|
||||
* see `server.close()`, the IPC channel to the worker will close allowing it
|
||||
* to die gracefully.
|
||||
*
|
||||
* The above applies _only_ to server connections, client connections are not
|
||||
* automatically closed by workers, and disconnect does not wait for them to close
|
||||
* before exiting.
|
||||
*
|
||||
* In a worker, `process.disconnect` exists, but it is not this function;
|
||||
* it is `disconnect()`.
|
||||
*
|
||||
* Because long living server connections may block workers from disconnecting, it
|
||||
* may be useful to send a message, so application specific actions may be taken to
|
||||
* close them. It also may be useful to implement a timeout, killing a worker if
|
||||
* the `'disconnect'` event has not been emitted after some time.
|
||||
*
|
||||
* ```js
|
||||
* if (cluster.isPrimary) {
|
||||
* const worker = cluster.fork();
|
||||
* let timeout;
|
||||
*
|
||||
* worker.on('listening', (address) => {
|
||||
* worker.send('shutdown');
|
||||
* worker.disconnect();
|
||||
* timeout = setTimeout(() => {
|
||||
* worker.kill();
|
||||
* }, 2000);
|
||||
* });
|
||||
*
|
||||
* worker.on('disconnect', () => {
|
||||
* clearTimeout(timeout);
|
||||
* });
|
||||
*
|
||||
* } else if (cluster.isWorker) {
|
||||
* const net = require('net');
|
||||
* const server = net.createServer((socket) => {
|
||||
* // Connections never end
|
||||
* });
|
||||
*
|
||||
* server.listen(8000);
|
||||
*
|
||||
* process.on('message', (msg) => {
|
||||
* if (msg === 'shutdown') {
|
||||
* // Initiate graceful close of any connections to server
|
||||
* }
|
||||
* });
|
||||
* }
|
||||
* ```
|
||||
* @since v0.7.7
|
||||
* @return A reference to `worker`.
|
||||
*/
|
||||
disconnect(): void;
|
||||
/**
|
||||
* This function returns `true` if the worker is connected to its primary via its
|
||||
* IPC channel, `false` otherwise. A worker is connected to its primary after it
|
||||
* has been created. It is disconnected after the `'disconnect'` event is emitted.
|
||||
* @since v0.11.14
|
||||
*/
|
||||
isConnected(): boolean;
|
||||
/**
|
||||
* This function returns `true` if the worker's process has terminated (either
|
||||
* because of exiting or being signaled). Otherwise, it returns `false`.
|
||||
*
|
||||
* ```js
|
||||
* const cluster = require('cluster');
|
||||
* const http = require('http');
|
||||
* const numCPUs = require('os').cpus().length;
|
||||
*
|
||||
* if (cluster.isPrimary) {
|
||||
* console.log(`Primary ${process.pid} is running`);
|
||||
*
|
||||
* // Fork workers.
|
||||
* for (let i = 0; i < numCPUs; i++) {
|
||||
* cluster.fork();
|
||||
* }
|
||||
*
|
||||
* cluster.on('fork', (worker) => {
|
||||
* console.log('worker is dead:', worker.isDead());
|
||||
* });
|
||||
*
|
||||
* cluster.on('exit', (worker, code, signal) => {
|
||||
* console.log('worker is dead:', worker.isDead());
|
||||
* });
|
||||
* } else {
|
||||
* // Workers can share any TCP connection. In this case, it is an HTTP server.
|
||||
* http.createServer((req, res) => {
|
||||
* res.writeHead(200);
|
||||
* res.end(`Current process\n ${process.pid}`);
|
||||
* process.kill(process.pid);
|
||||
* }).listen(8000);
|
||||
* }
|
||||
* ```
|
||||
* @since v0.11.14
|
||||
*/
|
||||
isDead(): boolean;
|
||||
/**
|
||||
* This property is `true` if the worker exited due to `.kill()` or`.disconnect()`. If the worker exited any other way, it is `false`. If the
|
||||
* worker has not exited, it is `undefined`.
|
||||
*
|
||||
* The boolean `worker.exitedAfterDisconnect` allows distinguishing between
|
||||
* voluntary and accidental exit, the primary may choose not to respawn a worker
|
||||
* based on this value.
|
||||
*
|
||||
* ```js
|
||||
* cluster.on('exit', (worker, code, signal) => {
|
||||
* if (worker.exitedAfterDisconnect === true) {
|
||||
* console.log('Oh, it was just voluntary – no need to worry');
|
||||
* }
|
||||
* });
|
||||
*
|
||||
* // kill worker
|
||||
* worker.kill();
|
||||
* ```
|
||||
* @since v6.0.0
|
||||
*/
|
||||
exitedAfterDisconnect: boolean;
|
||||
|
||||
/**
|
||||
* events.EventEmitter
|
||||
* 1. disconnect
|
||||
@@ -277,67 +42,68 @@ declare module 'cluster' {
|
||||
* 6. online
|
||||
*/
|
||||
addListener(event: string, listener: (...args: any[]) => void): this;
|
||||
addListener(event: 'disconnect', listener: () => void): this;
|
||||
addListener(event: 'error', listener: (error: Error) => void): this;
|
||||
addListener(event: 'exit', listener: (code: number, signal: string) => void): this;
|
||||
addListener(event: 'listening', listener: (address: Address) => void): this;
|
||||
addListener(event: 'message', listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||
addListener(event: 'online', listener: () => void): this;
|
||||
addListener(event: "disconnect", listener: () => void): this;
|
||||
addListener(event: "error", listener: (error: Error) => void): this;
|
||||
addListener(event: "exit", listener: (code: number, signal: string) => void): this;
|
||||
addListener(event: "listening", listener: (address: Address) => void): this;
|
||||
addListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||
addListener(event: "online", listener: () => void): this;
|
||||
|
||||
emit(event: string | symbol, ...args: any[]): boolean;
|
||||
emit(event: 'disconnect'): boolean;
|
||||
emit(event: 'error', error: Error): boolean;
|
||||
emit(event: 'exit', code: number, signal: string): boolean;
|
||||
emit(event: 'listening', address: Address): boolean;
|
||||
emit(event: 'message', message: any, handle: net.Socket | net.Server): boolean;
|
||||
emit(event: 'online'): boolean;
|
||||
emit(event: "disconnect"): boolean;
|
||||
emit(event: "error", error: Error): boolean;
|
||||
emit(event: "exit", code: number, signal: string): boolean;
|
||||
emit(event: "listening", address: Address): boolean;
|
||||
emit(event: "message", message: any, handle: net.Socket | net.Server): boolean;
|
||||
emit(event: "online"): boolean;
|
||||
|
||||
on(event: string, listener: (...args: any[]) => void): this;
|
||||
on(event: 'disconnect', listener: () => void): this;
|
||||
on(event: 'error', listener: (error: Error) => void): this;
|
||||
on(event: 'exit', listener: (code: number, signal: string) => void): this;
|
||||
on(event: 'listening', listener: (address: Address) => void): this;
|
||||
on(event: 'message', listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
|
||||
on(event: 'online', listener: () => void): this;
on(event: "disconnect", listener: () => void): this;
on(event: "error", listener: (error: Error) => void): this;
on(event: "exit", listener: (code: number, signal: string) => void): this;
on(event: "listening", listener: (address: Address) => void): this;
on(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
on(event: "online", listener: () => void): this;

once(event: string, listener: (...args: any[]) => void): this;
once(event: 'disconnect', listener: () => void): this;
once(event: 'error', listener: (error: Error) => void): this;
once(event: 'exit', listener: (code: number, signal: string) => void): this;
once(event: 'listening', listener: (address: Address) => void): this;
once(event: 'message', listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
once(event: 'online', listener: () => void): this;
once(event: "disconnect", listener: () => void): this;
once(event: "error", listener: (error: Error) => void): this;
once(event: "exit", listener: (code: number, signal: string) => void): this;
once(event: "listening", listener: (address: Address) => void): this;
once(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
once(event: "online", listener: () => void): this;

prependListener(event: string, listener: (...args: any[]) => void): this;
prependListener(event: 'disconnect', listener: () => void): this;
prependListener(event: 'error', listener: (error: Error) => void): this;
prependListener(event: 'exit', listener: (code: number, signal: string) => void): this;
prependListener(event: 'listening', listener: (address: Address) => void): this;
prependListener(event: 'message', listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
prependListener(event: 'online', listener: () => void): this;
prependListener(event: "disconnect", listener: () => void): this;
prependListener(event: "error", listener: (error: Error) => void): this;
prependListener(event: "exit", listener: (code: number, signal: string) => void): this;
prependListener(event: "listening", listener: (address: Address) => void): this;
prependListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
prependListener(event: "online", listener: () => void): this;

prependOnceListener(event: string, listener: (...args: any[]) => void): this;
prependOnceListener(event: 'disconnect', listener: () => void): this;
prependOnceListener(event: 'error', listener: (error: Error) => void): this;
prependOnceListener(event: 'exit', listener: (code: number, signal: string) => void): this;
prependOnceListener(event: 'listening', listener: (address: Address) => void): this;
prependOnceListener(event: 'message', listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
prependOnceListener(event: 'online', listener: () => void): this;
prependOnceListener(event: "disconnect", listener: () => void): this;
prependOnceListener(event: "error", listener: (error: Error) => void): this;
prependOnceListener(event: "exit", listener: (code: number, signal: string) => void): this;
prependOnceListener(event: "listening", listener: (address: Address) => void): this;
prependOnceListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
prependOnceListener(event: "online", listener: () => void): this;
}

export interface Cluster extends EventEmitter {

interface Cluster extends events.EventEmitter {
Worker: Worker;
disconnect(callback?: () => void): void;
fork(env?: any): Worker;
/** @deprecated since v16.0.0 - use setupPrimary. */
readonly isMaster: boolean;
readonly isPrimary: boolean;
readonly isWorker: boolean;
schedulingPolicy: number;
readonly settings: ClusterSettings;
/** @deprecated since v16.0.0 - use setupPrimary. */
isMaster: boolean;
isWorker: boolean;
// TODO: cluster.schedulingPolicy
settings: ClusterSettings;
setupMaster(settings?: ClusterSettings): void;
/**
* `setupPrimary` is used to change the default 'fork' behavior. Once called, the settings will be present in cluster.settings.
*/
setupPrimary(settings?: ClusterSettings): void;
readonly worker?: Worker | undefined;
readonly workers?: NodeJS.Dict<Worker> | undefined;
readonly SCHED_NONE: number;
readonly SCHED_RR: number;
worker?: Worker;
workers?: {
[index: string]: Worker | undefined
};

/**
* events.EventEmitter
* 1. disconnect
@@ -349,60 +115,146 @@ declare module 'cluster' {
* 7. setup
*/
addListener(event: string, listener: (...args: any[]) => void): this;
addListener(event: 'disconnect', listener: (worker: Worker) => void): this;
addListener(event: 'exit', listener: (worker: Worker, code: number, signal: string) => void): this;
addListener(event: 'fork', listener: (worker: Worker) => void): this;
addListener(event: 'listening', listener: (worker: Worker, address: Address) => void): this;
addListener(event: 'message', listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
addListener(event: 'online', listener: (worker: Worker) => void): this;
addListener(event: 'setup', listener: (settings: ClusterSettings) => void): this;
addListener(event: "disconnect", listener: (worker: Worker) => void): this;
addListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
addListener(event: "fork", listener: (worker: Worker) => void): this;
addListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
addListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
addListener(event: "online", listener: (worker: Worker) => void): this;
addListener(event: "setup", listener: (settings: ClusterSettings) => void): this;

emit(event: string | symbol, ...args: any[]): boolean;
emit(event: 'disconnect', worker: Worker): boolean;
emit(event: 'exit', worker: Worker, code: number, signal: string): boolean;
emit(event: 'fork', worker: Worker): boolean;
emit(event: 'listening', worker: Worker, address: Address): boolean;
emit(event: 'message', worker: Worker, message: any, handle: net.Socket | net.Server): boolean;
emit(event: 'online', worker: Worker): boolean;
emit(event: 'setup', settings: ClusterSettings): boolean;
emit(event: "disconnect", worker: Worker): boolean;
emit(event: "exit", worker: Worker, code: number, signal: string): boolean;
emit(event: "fork", worker: Worker): boolean;
emit(event: "listening", worker: Worker, address: Address): boolean;
emit(event: "message", worker: Worker, message: any, handle: net.Socket | net.Server): boolean;
emit(event: "online", worker: Worker): boolean;
emit(event: "setup", settings: ClusterSettings): boolean;

on(event: string, listener: (...args: any[]) => void): this;
on(event: 'disconnect', listener: (worker: Worker) => void): this;
on(event: 'exit', listener: (worker: Worker, code: number, signal: string) => void): this;
on(event: 'fork', listener: (worker: Worker) => void): this;
on(event: 'listening', listener: (worker: Worker, address: Address) => void): this;
on(event: 'message', listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
on(event: 'online', listener: (worker: Worker) => void): this;
on(event: 'setup', listener: (settings: ClusterSettings) => void): this;
on(event: "disconnect", listener: (worker: Worker) => void): this;
on(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
on(event: "fork", listener: (worker: Worker) => void): this;
on(event: "listening", listener: (worker: Worker, address: Address) => void): this;
on(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
on(event: "online", listener: (worker: Worker) => void): this;
on(event: "setup", listener: (settings: ClusterSettings) => void): this;

once(event: string, listener: (...args: any[]) => void): this;
once(event: 'disconnect', listener: (worker: Worker) => void): this;
once(event: 'exit', listener: (worker: Worker, code: number, signal: string) => void): this;
once(event: 'fork', listener: (worker: Worker) => void): this;
once(event: 'listening', listener: (worker: Worker, address: Address) => void): this;
once(event: 'message', listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
once(event: 'online', listener: (worker: Worker) => void): this;
once(event: 'setup', listener: (settings: ClusterSettings) => void): this;
once(event: "disconnect", listener: (worker: Worker) => void): this;
once(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
once(event: "fork", listener: (worker: Worker) => void): this;
once(event: "listening", listener: (worker: Worker, address: Address) => void): this;
once(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
once(event: "online", listener: (worker: Worker) => void): this;
once(event: "setup", listener: (settings: ClusterSettings) => void): this;

prependListener(event: string, listener: (...args: any[]) => void): this;
prependListener(event: 'disconnect', listener: (worker: Worker) => void): this;
prependListener(event: 'exit', listener: (worker: Worker, code: number, signal: string) => void): this;
prependListener(event: 'fork', listener: (worker: Worker) => void): this;
prependListener(event: 'listening', listener: (worker: Worker, address: Address) => void): this;
// the handle is a net.Socket or net.Server object, or undefined.
prependListener(event: 'message', listener: (worker: Worker, message: any, handle?: net.Socket | net.Server) => void): this;
prependListener(event: 'online', listener: (worker: Worker) => void): this;
prependListener(event: 'setup', listener: (settings: ClusterSettings) => void): this;
prependListener(event: "disconnect", listener: (worker: Worker) => void): this;
prependListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
prependListener(event: "fork", listener: (worker: Worker) => void): this;
prependListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
prependListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
prependListener(event: "online", listener: (worker: Worker) => void): this;
prependListener(event: "setup", listener: (settings: ClusterSettings) => void): this;

prependOnceListener(event: string, listener: (...args: any[]) => void): this;
prependOnceListener(event: 'disconnect', listener: (worker: Worker) => void): this;
prependOnceListener(event: 'exit', listener: (worker: Worker, code: number, signal: string) => void): this;
prependOnceListener(event: 'fork', listener: (worker: Worker) => void): this;
prependOnceListener(event: 'listening', listener: (worker: Worker, address: Address) => void): this;
prependOnceListener(event: "disconnect", listener: (worker: Worker) => void): this;
prependOnceListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this;
prependOnceListener(event: "fork", listener: (worker: Worker) => void): this;
prependOnceListener(event: "listening", listener: (worker: Worker, address: Address) => void): this;
// the handle is a net.Socket or net.Server object, or undefined.
prependOnceListener(event: 'message', listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this;
prependOnceListener(event: 'online', listener: (worker: Worker) => void): this;
prependOnceListener(event: 'setup', listener: (settings: ClusterSettings) => void): this;
prependOnceListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this;
prependOnceListener(event: "online", listener: (worker: Worker) => void): this;
prependOnceListener(event: "setup", listener: (settings: ClusterSettings) => void): this;
}
const cluster: Cluster;
export default cluster;
}
declare module 'node:cluster' {
export * from 'cluster';
export { default as default } from 'cluster';

function disconnect(callback?: () => void): void;
function fork(env?: any): Worker;
const isMaster: boolean;
const isWorker: boolean;
// TODO: cluster.schedulingPolicy
const settings: ClusterSettings;
function setupMaster(settings?: ClusterSettings): void;
const worker: Worker;
const workers: {
[index: string]: Worker | undefined
};

/**
* events.EventEmitter
* 1. disconnect
* 2. exit
* 3. fork
* 4. listening
* 5. message
* 6. online
* 7. setup
*/
function addListener(event: string, listener: (...args: any[]) => void): Cluster;
function addListener(event: "disconnect", listener: (worker: Worker) => void): Cluster;
function addListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster;
function addListener(event: "fork", listener: (worker: Worker) => void): Cluster;
function addListener(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster;
// the handle is a net.Socket or net.Server object, or undefined.
function addListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster;
function addListener(event: "online", listener: (worker: Worker) => void): Cluster;
function addListener(event: "setup", listener: (settings: ClusterSettings) => void): Cluster;

function emit(event: string | symbol, ...args: any[]): boolean;
function emit(event: "disconnect", worker: Worker): boolean;
function emit(event: "exit", worker: Worker, code: number, signal: string): boolean;
function emit(event: "fork", worker: Worker): boolean;
function emit(event: "listening", worker: Worker, address: Address): boolean;
function emit(event: "message", worker: Worker, message: any, handle: net.Socket | net.Server): boolean;
function emit(event: "online", worker: Worker): boolean;
function emit(event: "setup", settings: ClusterSettings): boolean;

function on(event: string, listener: (...args: any[]) => void): Cluster;
function on(event: "disconnect", listener: (worker: Worker) => void): Cluster;
function on(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster;
function on(event: "fork", listener: (worker: Worker) => void): Cluster;
function on(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster;
function on(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster; // the handle is a net.Socket or net.Server object, or undefined.
function on(event: "online", listener: (worker: Worker) => void): Cluster;
function on(event: "setup", listener: (settings: ClusterSettings) => void): Cluster;

function once(event: string, listener: (...args: any[]) => void): Cluster;
function once(event: "disconnect", listener: (worker: Worker) => void): Cluster;
function once(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster;
function once(event: "fork", listener: (worker: Worker) => void): Cluster;
function once(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster;
function once(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster; // the handle is a net.Socket or net.Server object, or undefined.
function once(event: "online", listener: (worker: Worker) => void): Cluster;
function once(event: "setup", listener: (settings: ClusterSettings) => void): Cluster;

function removeListener(event: string, listener: (...args: any[]) => void): Cluster;
function removeAllListeners(event?: string): Cluster;
function setMaxListeners(n: number): Cluster;
function getMaxListeners(): number;
function listeners(event: string): Function[];
function listenerCount(type: string): number;

function prependListener(event: string, listener: (...args: any[]) => void): Cluster;
function prependListener(event: "disconnect", listener: (worker: Worker) => void): Cluster;
function prependListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster;
function prependListener(event: "fork", listener: (worker: Worker) => void): Cluster;
function prependListener(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster;
// the handle is a net.Socket or net.Server object, or undefined.
function prependListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster;
function prependListener(event: "online", listener: (worker: Worker) => void): Cluster;
function prependListener(event: "setup", listener: (settings: ClusterSettings) => void): Cluster;

function prependOnceListener(event: string, listener: (...args: any[]) => void): Cluster;
function prependOnceListener(event: "disconnect", listener: (worker: Worker) => void): Cluster;
function prependOnceListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster;
function prependOnceListener(event: "fork", listener: (worker: Worker) => void): Cluster;
function prependOnceListener(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster;
// the handle is a net.Socket or net.Server object, or undefined.
function prependOnceListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster;
function prependOnceListener(event: "online", listener: (worker: Worker) => void): Cluster;
function prependOnceListener(event: "setup", listener: (settings: ClusterSettings) => void): Cluster;

function eventNames(): string[];
}
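The per-event overloads above pin down each listener's signature. A minimal usage sketch against the newer (v16-style) cluster API shown in this diff, assuming an ordinary Node.js 16+ runtime; the worker payload is illustrative only:

```ts
import cluster from 'cluster';
import * as os from 'os';

if (cluster.isPrimary) {
    // Fork one worker per CPU; the typed 'exit' overload gives (worker, code, signal).
    for (const _ of os.cpus()) {
        cluster.fork();
    }
    cluster.on('exit', (worker, code, signal) => {
        console.log(`worker ${worker.id} exited (code=${code}, signal=${signal})`);
    });
} else {
    // Worker processes would run the actual workload here.
    console.log(`worker ${cluster.worker?.id} online`);
}
```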
node_modules/@types/node/console.d.ts (406 changed lines; generated, vendored; Executable file → Normal file)
@@ -1,407 +1,3 @@
/**
* The `console` module provides a simple debugging console that is similar to the
* JavaScript console mechanism provided by web browsers.
*
* The module exports two specific components:
*
* * A `Console` class with methods such as `console.log()`, `console.error()` and`console.warn()` that can be used to write to any Node.js stream.
* * A global `console` instance configured to write to `process.stdout` and `process.stderr`. The global `console` can be used without calling`require('console')`.
*
* _**Warning**_: The global console object's methods are neither consistently
* synchronous like the browser APIs they resemble, nor are they consistently
* asynchronous like all other Node.js streams. See the `note on process I/O` for
* more information.
*
* Example using the global `console`:
*
* ```js
* console.log('hello world');
* // Prints: hello world, to stdout
* console.log('hello %s', 'world');
* // Prints: hello world, to stdout
* console.error(new Error('Whoops, something bad happened'));
* // Prints error message and stack trace to stderr:
* // Error: Whoops, something bad happened
* // at [eval]:5:15
* // at Script.runInThisContext (node:vm:132:18)
* // at Object.runInThisContext (node:vm:309:38)
* // at node:internal/process/execution:77:19
* // at [eval]-wrapper:6:22
* // at evalScript (node:internal/process/execution:76:60)
* // at node:internal/main/eval_string:23:3
*
* const name = 'Will Robinson';
* console.warn(`Danger ${name}! Danger!`);
* // Prints: Danger Will Robinson! Danger!, to stderr
* ```
*
* Example using the `Console` class:
*
* ```js
* const out = getStreamSomehow();
* const err = getStreamSomehow();
* const myConsole = new console.Console(out, err);
*
* myConsole.log('hello world');
* // Prints: hello world, to out
* myConsole.log('hello %s', 'world');
* // Prints: hello world, to out
* myConsole.error(new Error('Whoops, something bad happened'));
* // Prints: [Error: Whoops, something bad happened], to err
*
* const name = 'Will Robinson';
* myConsole.warn(`Danger ${name}! Danger!`);
* // Prints: Danger Will Robinson! Danger!, to err
* ```
* @see [source](https://github.com/nodejs/node/blob/v16.4.2/lib/console.js)
*/
declare module 'console' {
import console = require('node:console');
declare module "console" {
export = console;
}
declare module 'node:console' {
import { InspectOptions } from 'node:util';
global {
// This needs to be global to avoid TS2403 in case lib.dom.d.ts is present in the same build
interface Console {
Console: console.ConsoleConstructor;
/**
* `console.assert()` writes a message if `value` is [falsy](https://developer.mozilla.org/en-US/docs/Glossary/Falsy) or omitted. It only
* writes a message and does not otherwise affect execution. The output always
* starts with `"Assertion failed"`. If provided, `message` is formatted using `util.format()`.
*
* If `value` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy), nothing happens.
*
* ```js
* console.assert(true, 'does nothing');
*
* console.assert(false, 'Whoops %s work', 'didn\'t');
* // Assertion failed: Whoops didn't work
*
* console.assert();
* // Assertion failed
* ```
* @since v0.1.101
* @param value The value tested for being truthy.
* @param message All arguments besides `value` are used as error message.
*/
assert(value: any, message?: string, ...optionalParams: any[]): void;
/**
* When `stdout` is a TTY, calling `console.clear()` will attempt to clear the
* TTY. When `stdout` is not a TTY, this method does nothing.
*
* The specific operation of `console.clear()` can vary across operating systems
* and terminal types. For most Linux operating systems, `console.clear()`operates similarly to the `clear` shell command. On Windows, `console.clear()`will clear only the output in the
* current terminal viewport for the Node.js
* binary.
* @since v8.3.0
*/
clear(): void;
/**
* Maintains an internal counter specific to `label` and outputs to `stdout` the
* number of times `console.count()` has been called with the given `label`.
*
* ```js
* > console.count()
* default: 1
* undefined
* > console.count('default')
* default: 2
* undefined
* > console.count('abc')
* abc: 1
* undefined
* > console.count('xyz')
* xyz: 1
* undefined
* > console.count('abc')
* abc: 2
* undefined
* > console.count()
* default: 3
* undefined
* >
* ```
* @since v8.3.0
* @param label The display label for the counter.
*/
count(label?: string): void;
/**
* Resets the internal counter specific to `label`.
*
* ```js
* > console.count('abc');
* abc: 1
* undefined
* > console.countReset('abc');
* undefined
* > console.count('abc');
* abc: 1
* undefined
* >
* ```
* @since v8.3.0
* @param label The display label for the counter.
*/
countReset(label?: string): void;
/**
* The `console.debug()` function is an alias for {@link log}.
* @since v8.0.0
*/
debug(message?: any, ...optionalParams: any[]): void;
/**
* Uses `util.inspect()` on `obj` and prints the resulting string to `stdout`.
* This function bypasses any custom `inspect()` function defined on `obj`.
* @since v0.1.101
*/
dir(obj: any, options?: InspectOptions): void;
/**
* This method calls `console.log()` passing it the arguments received.
* This method does not produce any XML formatting.
* @since v8.0.0
*/
dirxml(...data: any[]): void;
/**
* Prints to `stderr` with newline. Multiple arguments can be passed, with the
* first used as the primary message and all additional used as substitution
* values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html) (the arguments are all passed to `util.format()`).
*
* ```js
* const code = 5;
* console.error('error #%d', code);
* // Prints: error #5, to stderr
* console.error('error', code);
* // Prints: error 5, to stderr
* ```
*
* If formatting elements (e.g. `%d`) are not found in the first string then `util.inspect()` is called on each argument and the resulting string
* values are concatenated. See `util.format()` for more information.
* @since v0.1.100
*/
error(message?: any, ...optionalParams: any[]): void;
/**
* Increases indentation of subsequent lines by spaces for `groupIndentation`length.
*
* If one or more `label`s are provided, those are printed first without the
* additional indentation.
* @since v8.5.0
*/
group(...label: any[]): void;
/**
* An alias for {@link group}.
* @since v8.5.0
*/
groupCollapsed(...label: any[]): void;
/**
* Decreases indentation of subsequent lines by spaces for `groupIndentation`length.
* @since v8.5.0
*/
groupEnd(): void;
/**
* The `console.info()` function is an alias for {@link log}.
* @since v0.1.100
*/
info(message?: any, ...optionalParams: any[]): void;
/**
* Prints to `stdout` with newline. Multiple arguments can be passed, with the
* first used as the primary message and all additional used as substitution
* values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html) (the arguments are all passed to `util.format()`).
*
* ```js
* const count = 5;
* console.log('count: %d', count);
* // Prints: count: 5, to stdout
* console.log('count:', count);
* // Prints: count: 5, to stdout
* ```
*
* See `util.format()` for more information.
* @since v0.1.100
*/
log(message?: any, ...optionalParams: any[]): void;
/**
* Try to construct a table with the columns of the properties of `tabularData`(or use `properties`) and rows of `tabularData` and log it. Falls back to just
* logging the argument if it can’t be parsed as tabular.
*
* ```js
* // These can't be parsed as tabular data
* console.table(Symbol());
* // Symbol()
*
* console.table(undefined);
* // undefined
*
* console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }]);
* // ┌─────────┬─────┬─────┐
* // │ (index) │ a │ b │
* // ├─────────┼─────┼─────┤
* // │ 0 │ 1 │ 'Y' │
* // │ 1 │ 'Z' │ 2 │
* // └─────────┴─────┴─────┘
*
* console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }], ['a']);
* // ┌─────────┬─────┐
* // │ (index) │ a │
* // ├─────────┼─────┤
* // │ 0 │ 1 │
* // │ 1 │ 'Z' │
* // └─────────┴─────┘
* ```
* @since v10.0.0
* @param properties Alternate properties for constructing the table.
*/
table(tabularData: any, properties?: ReadonlyArray<string>): void;
/**
* Starts a timer that can be used to compute the duration of an operation. Timers
* are identified by a unique `label`. Use the same `label` when calling {@link timeEnd} to stop the timer and output the elapsed time in
* suitable time units to `stdout`. For example, if the elapsed
* time is 3869ms, `console.timeEnd()` displays "3.869s".
* @since v0.1.104
*/
time(label?: string): void;
/**
* Stops a timer that was previously started by calling {@link time} and
* prints the result to `stdout`:
*
* ```js
* console.time('100-elements');
* for (let i = 0; i < 100; i++) {}
* console.timeEnd('100-elements');
* // prints 100-elements: 225.438ms
* ```
* @since v0.1.104
*/
timeEnd(label?: string): void;
/**
* For a timer that was previously started by calling {@link time}, prints
* the elapsed time and other `data` arguments to `stdout`:
*
* ```js
* console.time('process');
* const value = expensiveProcess1(); // Returns 42
* console.timeLog('process', value);
* // Prints "process: 365.227ms 42".
* doExpensiveProcess2(value);
* console.timeEnd('process');
* ```
* @since v10.7.0
*/
timeLog(label?: string, ...data: any[]): void;
/**
* Prints to `stderr` the string `'Trace: '`, followed by the `util.format()` formatted message and stack trace to the current position in the code.
*
* ```js
* console.trace('Show me');
* // Prints: (stack trace will vary based on where trace is called)
* // Trace: Show me
* // at repl:2:9
* // at REPLServer.defaultEval (repl.js:248:27)
* // at bound (domain.js:287:14)
* // at REPLServer.runBound [as eval] (domain.js:300:12)
* // at REPLServer.<anonymous> (repl.js:412:12)
* // at emitOne (events.js:82:20)
* // at REPLServer.emit (events.js:169:7)
* // at REPLServer.Interface._onLine (readline.js:210:10)
* // at REPLServer.Interface._line (readline.js:549:8)
* // at REPLServer.Interface._ttyWrite (readline.js:826:14)
* ```
* @since v0.1.104
*/
trace(message?: any, ...optionalParams: any[]): void;
/**
* The `console.warn()` function is an alias for {@link error}.
* @since v0.1.100
*/
warn(message?: any, ...optionalParams: any[]): void;
// --- Inspector mode only ---
/**
* This method does not display anything unless used in the inspector.
* Starts a JavaScript CPU profile with an optional label.
*/
profile(label?: string): void;
/**
* This method does not display anything unless used in the inspector.
* Stops the current JavaScript CPU profiling session if one has been started and prints the report to the Profiles panel of the inspector.
*/
profileEnd(label?: string): void;
/**
* This method does not display anything unless used in the inspector.
* Adds an event with the label `label` to the Timeline panel of the inspector.
*/
timeStamp(label?: string): void;
}
/**
* The `console` module provides a simple debugging console that is similar to the
* JavaScript console mechanism provided by web browsers.
*
* The module exports two specific components:
*
* * A `Console` class with methods such as `console.log()`, `console.error()` and`console.warn()` that can be used to write to any Node.js stream.
* * A global `console` instance configured to write to `process.stdout` and `process.stderr`. The global `console` can be used without calling`require('console')`.
*
* _**Warning**_: The global console object's methods are neither consistently
* synchronous like the browser APIs they resemble, nor are they consistently
* asynchronous like all other Node.js streams. See the `note on process I/O` for
* more information.
*
* Example using the global `console`:
*
* ```js
* console.log('hello world');
* // Prints: hello world, to stdout
* console.log('hello %s', 'world');
* // Prints: hello world, to stdout
* console.error(new Error('Whoops, something bad happened'));
* // Prints error message and stack trace to stderr:
* // Error: Whoops, something bad happened
* // at [eval]:5:15
* // at Script.runInThisContext (node:vm:132:18)
* // at Object.runInThisContext (node:vm:309:38)
* // at node:internal/process/execution:77:19
* // at [eval]-wrapper:6:22
* // at evalScript (node:internal/process/execution:76:60)
* // at node:internal/main/eval_string:23:3
*
* const name = 'Will Robinson';
* console.warn(`Danger ${name}! Danger!`);
* // Prints: Danger Will Robinson! Danger!, to stderr
* ```
*
* Example using the `Console` class:
*
* ```js
* const out = getStreamSomehow();
* const err = getStreamSomehow();
* const myConsole = new console.Console(out, err);
*
* myConsole.log('hello world');
* // Prints: hello world, to out
* myConsole.log('hello %s', 'world');
* // Prints: hello world, to out
* myConsole.error(new Error('Whoops, something bad happened'));
* // Prints: [Error: Whoops, something bad happened], to err
*
* const name = 'Will Robinson';
* myConsole.warn(`Danger ${name}! Danger!`);
* // Prints: Danger Will Robinson! Danger!, to err
* ```
* @see [source](https://github.com/nodejs/node/blob/v16.4.2/lib/console.js)
*/
namespace console {
interface ConsoleConstructorOptions {
stdout: NodeJS.WritableStream;
stderr?: NodeJS.WritableStream | undefined;
ignoreErrors?: boolean | undefined;
colorMode?: boolean | 'auto' | undefined;
inspectOptions?: InspectOptions | undefined;
}
interface ConsoleConstructor {
prototype: Console;
new (stdout: NodeJS.WritableStream, stderr?: NodeJS.WritableStream, ignoreErrors?: boolean): Console;
new (options: ConsoleConstructorOptions): Console;
}
}
var console: Console;
}
export = globalThis.console;
}
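A short sketch of constructing a `Console` from the `ConsoleConstructorOptions` shape declared above; the log file path is a made-up example, everything else follows the standard constructor:

```ts
import { Console } from 'node:console';
import * as fs from 'fs';

// Route ordinary log output to a file while keeping errors on stderr.
const output = fs.createWriteStream('./app.log'); // hypothetical destination
const logger = new Console({
    stdout: output,
    stderr: process.stderr,
    ignoreErrors: true, // swallow errors raised by the underlying streams
    colorMode: false,
});

logger.log('hello %s', 'world');          // written to ./app.log
logger.error(new Error('something bad')); // written to stderr
```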
node_modules/@types/node/constants.d.ts (462 changed lines; generated, vendored; Executable file → Normal file)
@@ -1,18 +1,448 @@
|
||||
/** @deprecated since v6.3.0 - use constants property exposed by the relevant module instead. */
|
||||
declare module 'constants' {
|
||||
import { constants as osConstants, SignalConstants } from 'node:os';
|
||||
import { constants as cryptoConstants } from 'node:crypto';
|
||||
import { constants as fsConstants } from 'node:fs';
|
||||
|
||||
const exp: typeof osConstants.errno &
|
||||
typeof osConstants.priority &
|
||||
SignalConstants &
|
||||
typeof cryptoConstants &
|
||||
typeof fsConstants;
|
||||
export = exp;
|
||||
}
|
||||
|
||||
declare module 'node:constants' {
|
||||
import constants = require('constants');
|
||||
export = constants;
|
||||
declare module "constants" {
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.E2BIG` instead. */
|
||||
const E2BIG: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EACCES` instead. */
|
||||
const EACCES: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EADDRINUSE` instead. */
|
||||
const EADDRINUSE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EADDRNOTAVAIL` instead. */
|
||||
const EADDRNOTAVAIL: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EAFNOSUPPORT` instead. */
|
||||
const EAFNOSUPPORT: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EAGAIN` instead. */
|
||||
const EAGAIN: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EALREADY` instead. */
|
||||
const EALREADY: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EBADF` instead. */
|
||||
const EBADF: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EBADMSG` instead. */
|
||||
const EBADMSG: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EBUSY` instead. */
|
||||
const EBUSY: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ECANCELED` instead. */
|
||||
const ECANCELED: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ECHILD` instead. */
|
||||
const ECHILD: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ECONNABORTED` instead. */
|
||||
const ECONNABORTED: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ECONNREFUSED` instead. */
|
||||
const ECONNREFUSED: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ECONNRESET` instead. */
|
||||
const ECONNRESET: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EDEADLK` instead. */
|
||||
const EDEADLK: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EDESTADDRREQ` instead. */
|
||||
const EDESTADDRREQ: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EDOM` instead. */
|
||||
const EDOM: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EEXIST` instead. */
|
||||
const EEXIST: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EFAULT` instead. */
|
||||
const EFAULT: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EFBIG` instead. */
|
||||
const EFBIG: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EHOSTUNREACH` instead. */
|
||||
const EHOSTUNREACH: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EIDRM` instead. */
|
||||
const EIDRM: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EILSEQ` instead. */
|
||||
const EILSEQ: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EINPROGRESS` instead. */
|
||||
const EINPROGRESS: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EINTR` instead. */
|
||||
const EINTR: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EINVAL` instead. */
|
||||
const EINVAL: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EIO` instead. */
|
||||
const EIO: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EISCONN` instead. */
|
||||
const EISCONN: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EISDIR` instead. */
|
||||
const EISDIR: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ELOOP` instead. */
|
||||
const ELOOP: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EMFILE` instead. */
|
||||
const EMFILE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EMLINK` instead. */
|
||||
const EMLINK: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EMSGSIZE` instead. */
|
||||
const EMSGSIZE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENAMETOOLONG` instead. */
|
||||
const ENAMETOOLONG: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENETDOWN` instead. */
|
||||
const ENETDOWN: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENETRESET` instead. */
|
||||
const ENETRESET: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENETUNREACH` instead. */
|
||||
const ENETUNREACH: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENFILE` instead. */
|
||||
const ENFILE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOBUFS` instead. */
|
||||
const ENOBUFS: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENODATA` instead. */
|
||||
const ENODATA: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENODEV` instead. */
|
||||
const ENODEV: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOENT` instead. */
|
||||
const ENOENT: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOEXEC` instead. */
|
||||
const ENOEXEC: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOLCK` instead. */
|
||||
const ENOLCK: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOLINK` instead. */
|
||||
const ENOLINK: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOMEM` instead. */
|
||||
const ENOMEM: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOMSG` instead. */
|
||||
const ENOMSG: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOPROTOOPT` instead. */
|
||||
const ENOPROTOOPT: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOSPC` instead. */
|
||||
const ENOSPC: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOSR` instead. */
|
||||
const ENOSR: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOSTR` instead. */
|
||||
const ENOSTR: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOSYS` instead. */
|
||||
const ENOSYS: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOTCONN` instead. */
|
||||
const ENOTCONN: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOTDIR` instead. */
|
||||
const ENOTDIR: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOTEMPTY` instead. */
|
||||
const ENOTEMPTY: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOTSOCK` instead. */
|
||||
const ENOTSOCK: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOTSUP` instead. */
|
||||
const ENOTSUP: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENOTTY` instead. */
|
||||
const ENOTTY: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ENXIO` instead. */
|
||||
const ENXIO: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EOPNOTSUPP` instead. */
|
||||
const EOPNOTSUPP: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EOVERFLOW` instead. */
|
||||
const EOVERFLOW: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EPERM` instead. */
|
||||
const EPERM: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EPIPE` instead. */
|
||||
const EPIPE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EPROTO` instead. */
|
||||
const EPROTO: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EPROTONOSUPPORT` instead. */
|
||||
const EPROTONOSUPPORT: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EPROTOTYPE` instead. */
|
||||
const EPROTOTYPE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ERANGE` instead. */
|
||||
const ERANGE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EROFS` instead. */
|
||||
const EROFS: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ESPIPE` instead. */
|
||||
const ESPIPE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ESRCH` instead. */
|
||||
const ESRCH: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ETIME` instead. */
|
||||
const ETIME: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ETIMEDOUT` instead. */
|
||||
const ETIMEDOUT: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.ETXTBSY` instead. */
|
||||
const ETXTBSY: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EWOULDBLOCK` instead. */
|
||||
const EWOULDBLOCK: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.EXDEV` instead. */
|
||||
const EXDEV: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEINTR` instead. */
|
||||
const WSAEINTR: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEBADF` instead. */
|
||||
const WSAEBADF: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEACCES` instead. */
|
||||
const WSAEACCES: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEFAULT` instead. */
|
||||
const WSAEFAULT: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEINVAL` instead. */
|
||||
const WSAEINVAL: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEMFILE` instead. */
|
||||
const WSAEMFILE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEWOULDBLOCK` instead. */
|
||||
const WSAEWOULDBLOCK: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEINPROGRESS` instead. */
|
||||
const WSAEINPROGRESS: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEALREADY` instead. */
|
||||
const WSAEALREADY: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENOTSOCK` instead. */
|
||||
const WSAENOTSOCK: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEDESTADDRREQ` instead. */
|
||||
const WSAEDESTADDRREQ: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEMSGSIZE` instead. */
|
||||
const WSAEMSGSIZE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEPROTOTYPE` instead. */
|
||||
const WSAEPROTOTYPE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENOPROTOOPT` instead. */
|
||||
const WSAENOPROTOOPT: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEPROTONOSUPPORT` instead. */
|
||||
const WSAEPROTONOSUPPORT: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAESOCKTNOSUPPORT` instead. */
|
||||
const WSAESOCKTNOSUPPORT: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEOPNOTSUPP` instead. */
|
||||
const WSAEOPNOTSUPP: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEPFNOSUPPORT` instead. */
|
||||
const WSAEPFNOSUPPORT: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEAFNOSUPPORT` instead. */
|
||||
const WSAEAFNOSUPPORT: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEADDRINUSE` instead. */
|
||||
const WSAEADDRINUSE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEADDRNOTAVAIL` instead. */
|
||||
const WSAEADDRNOTAVAIL: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENETDOWN` instead. */
|
||||
const WSAENETDOWN: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENETUNREACH` instead. */
|
||||
const WSAENETUNREACH: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENETRESET` instead. */
|
||||
const WSAENETRESET: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAECONNABORTED` instead. */
|
||||
const WSAECONNABORTED: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAECONNRESET` instead. */
|
||||
const WSAECONNRESET: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENOBUFS` instead. */
|
||||
const WSAENOBUFS: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEISCONN` instead. */
|
||||
const WSAEISCONN: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENOTCONN` instead. */
|
||||
const WSAENOTCONN: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAESHUTDOWN` instead. */
|
||||
const WSAESHUTDOWN: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAETOOMANYREFS` instead. */
|
||||
const WSAETOOMANYREFS: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAETIMEDOUT` instead. */
|
||||
const WSAETIMEDOUT: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAECONNREFUSED` instead. */
|
||||
const WSAECONNREFUSED: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAELOOP` instead. */
|
||||
const WSAELOOP: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENAMETOOLONG` instead. */
|
||||
const WSAENAMETOOLONG: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEHOSTDOWN` instead. */
|
||||
const WSAEHOSTDOWN: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEHOSTUNREACH` instead. */
|
||||
const WSAEHOSTUNREACH: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENOTEMPTY` instead. */
|
||||
const WSAENOTEMPTY: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEPROCLIM` instead. */
|
||||
const WSAEPROCLIM: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEUSERS` instead. */
|
||||
const WSAEUSERS: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEDQUOT` instead. */
|
||||
const WSAEDQUOT: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAESTALE` instead. */
|
||||
const WSAESTALE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEREMOTE` instead. */
|
||||
const WSAEREMOTE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSASYSNOTREADY` instead. */
|
||||
const WSASYSNOTREADY: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAVERNOTSUPPORTED` instead. */
|
||||
const WSAVERNOTSUPPORTED: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSANOTINITIALISED` instead. */
|
||||
const WSANOTINITIALISED: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEDISCON` instead. */
|
||||
const WSAEDISCON: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAENOMORE` instead. */
|
||||
const WSAENOMORE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAECANCELLED` instead. */
|
||||
const WSAECANCELLED: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEINVALIDPROCTABLE` instead. */
|
||||
const WSAEINVALIDPROCTABLE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEINVALIDPROVIDER` instead. */
|
||||
const WSAEINVALIDPROVIDER: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEPROVIDERFAILEDINIT` instead. */
|
||||
const WSAEPROVIDERFAILEDINIT: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSASYSCALLFAILURE` instead. */
|
||||
const WSASYSCALLFAILURE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSASERVICE_NOT_FOUND` instead. */
|
||||
const WSASERVICE_NOT_FOUND: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSATYPE_NOT_FOUND` instead. */
|
||||
const WSATYPE_NOT_FOUND: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSA_E_NO_MORE` instead. */
|
||||
const WSA_E_NO_MORE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSA_E_CANCELLED` instead. */
|
||||
const WSA_E_CANCELLED: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.errno.WSAEREFUSED` instead. */
|
||||
const WSAEREFUSED: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGHUP` instead. */
|
||||
const SIGHUP: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGINT` instead. */
|
||||
const SIGINT: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGILL` instead. */
|
||||
const SIGILL: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGABRT` instead. */
|
||||
const SIGABRT: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGFPE` instead. */
|
||||
const SIGFPE: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGKILL` instead. */
|
||||
const SIGKILL: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGSEGV` instead. */
|
||||
const SIGSEGV: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGTERM` instead. */
|
||||
const SIGTERM: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGBREAK` instead. */
|
||||
const SIGBREAK: number;
|
||||
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGWINCH` instead. */
|
||||
const SIGWINCH: number;
|
||||
const SSL_OP_ALL: number;
|
||||
const SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION: number;
|
||||
const SSL_OP_CIPHER_SERVER_PREFERENCE: number;
|
||||
const SSL_OP_CISCO_ANYCONNECT: number;
|
||||
const SSL_OP_COOKIE_EXCHANGE: number;
|
||||
const SSL_OP_CRYPTOPRO_TLSEXT_BUG: number;
|
||||
const SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS: number;
|
||||
const SSL_OP_EPHEMERAL_RSA: number;
|
||||
const SSL_OP_LEGACY_SERVER_CONNECT: number;
|
||||
const SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER: number;
|
||||
const SSL_OP_MICROSOFT_SESS_ID_BUG: number;
|
||||
const SSL_OP_MSIE_SSLV2_RSA_PADDING: number;
|
||||
const SSL_OP_NETSCAPE_CA_DN_BUG: number;
|
||||
const SSL_OP_NETSCAPE_CHALLENGE_BUG: number;
|
||||
const SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG: number;
|
||||
const SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG: number;
|
||||
const SSL_OP_NO_COMPRESSION: number;
|
||||
const SSL_OP_NO_QUERY_MTU: number;
|
||||
const SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION: number;
|
||||
const SSL_OP_NO_SSLv2: number;
|
||||
const SSL_OP_NO_SSLv3: number;
|
||||
const SSL_OP_NO_TICKET: number;
|
||||
const SSL_OP_NO_TLSv1: number;
|
||||
const SSL_OP_NO_TLSv1_1: number;
|
||||
const SSL_OP_NO_TLSv1_2: number;
|
||||
const SSL_OP_PKCS1_CHECK_1: number;
|
||||
const SSL_OP_PKCS1_CHECK_2: number;
|
||||
const SSL_OP_SINGLE_DH_USE: number;
|
||||
const SSL_OP_SINGLE_ECDH_USE: number;
|
||||
const SSL_OP_SSLEAY_080_CLIENT_DH_BUG: number;
|
||||
const SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG: number;
|
||||
const SSL_OP_TLS_BLOCK_PADDING_BUG: number;
|
||||
const SSL_OP_TLS_D5_BUG: number;
|
||||
const SSL_OP_TLS_ROLLBACK_BUG: number;
|
||||
const ENGINE_METHOD_DSA: number;
|
||||
const ENGINE_METHOD_DH: number;
const ENGINE_METHOD_RAND: number;
const ENGINE_METHOD_ECDH: number;
const ENGINE_METHOD_ECDSA: number;
const ENGINE_METHOD_CIPHERS: number;
const ENGINE_METHOD_DIGESTS: number;
const ENGINE_METHOD_STORE: number;
const ENGINE_METHOD_PKEY_METHS: number;
const ENGINE_METHOD_PKEY_ASN1_METHS: number;
const ENGINE_METHOD_ALL: number;
const ENGINE_METHOD_NONE: number;
const DH_CHECK_P_NOT_SAFE_PRIME: number;
const DH_CHECK_P_NOT_PRIME: number;
const DH_UNABLE_TO_CHECK_GENERATOR: number;
const DH_NOT_SUITABLE_GENERATOR: number;
const RSA_PKCS1_PADDING: number;
const RSA_SSLV23_PADDING: number;
const RSA_NO_PADDING: number;
const RSA_PKCS1_OAEP_PADDING: number;
const RSA_X931_PADDING: number;
const RSA_PKCS1_PSS_PADDING: number;
const POINT_CONVERSION_COMPRESSED: number;
const POINT_CONVERSION_UNCOMPRESSED: number;
const POINT_CONVERSION_HYBRID: number;
const O_RDONLY: number;
const O_WRONLY: number;
const O_RDWR: number;
const S_IFMT: number;
const S_IFREG: number;
const S_IFDIR: number;
const S_IFCHR: number;
const S_IFBLK: number;
const S_IFIFO: number;
const S_IFSOCK: number;
const S_IRWXU: number;
const S_IRUSR: number;
const S_IWUSR: number;
const S_IXUSR: number;
const S_IRWXG: number;
const S_IRGRP: number;
const S_IWGRP: number;
const S_IXGRP: number;
const S_IRWXO: number;
const S_IROTH: number;
const S_IWOTH: number;
const S_IXOTH: number;
const S_IFLNK: number;
const O_CREAT: number;
const O_EXCL: number;
const O_NOCTTY: number;
const O_DIRECTORY: number;
const O_NOATIME: number;
const O_NOFOLLOW: number;
const O_SYNC: number;
const O_DSYNC: number;
const O_SYMLINK: number;
const O_DIRECT: number;
const O_NONBLOCK: number;
const O_TRUNC: number;
const O_APPEND: number;
const F_OK: number;
const R_OK: number;
const W_OK: number;
const X_OK: number;
const COPYFILE_EXCL: number;
const COPYFILE_FICLONE: number;
const COPYFILE_FICLONE_FORCE: number;
const UV_UDP_REUSEADDR: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGQUIT` instead. */
const SIGQUIT: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGTRAP` instead. */
const SIGTRAP: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGIOT` instead. */
const SIGIOT: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGBUS` instead. */
const SIGBUS: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGUSR1` instead. */
const SIGUSR1: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGUSR2` instead. */
const SIGUSR2: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGPIPE` instead. */
const SIGPIPE: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGALRM` instead. */
const SIGALRM: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGCHLD` instead. */
const SIGCHLD: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGSTKFLT` instead. */
const SIGSTKFLT: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGCONT` instead. */
const SIGCONT: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGSTOP` instead. */
const SIGSTOP: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGTSTP` instead. */
const SIGTSTP: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGTTIN` instead. */
const SIGTTIN: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGTTOU` instead. */
const SIGTTOU: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGURG` instead. */
const SIGURG: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGXCPU` instead. */
const SIGXCPU: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGXFSZ` instead. */
const SIGXFSZ: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGVTALRM` instead. */
const SIGVTALRM: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGPROF` instead. */
const SIGPROF: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGIO` instead. */
const SIGIO: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGPOLL` instead. */
const SIGPOLL: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGPWR` instead. */
const SIGPWR: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGSYS` instead. */
const SIGSYS: number;
/** @deprecated since v6.3.0 - use `os.constants.signals.SIGUNUSED` instead. */
const SIGUNUSED: number;
const defaultCoreCipherList: string;
const defaultCipherList: string;
const ENGINE_METHOD_RSA: number;
const ALPN_ENABLED: number;
}
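Not part of the diff above: a minimal sketch (assuming Node.js 16+ with `@types/node` installed, and using only documented Node.js APIs) of how these deprecated top-level constants are normally reached today through the per-module `fs.constants` and `crypto.constants` objects rather than the legacy `constants` module.

```ts
// Sketch only; all names below are standard Node.js APIs, nothing from this diff.
import { accessSync, statSync, constants as fsConstants } from "node:fs";
import { publicEncrypt, generateKeyPairSync, constants as cryptoConstants } from "node:crypto";

// F_OK/R_OK/W_OK/X_OK are bit flags combined for access checks.
accessSync(".", fsConstants.R_OK | fsConstants.W_OK);

// S_IFMT masks the file-type bits out of stat.mode; S_IFDIR identifies directories.
const isDir = (statSync(".").mode & fsConstants.S_IFMT) === fsConstants.S_IFDIR;

// RSA_PKCS1_OAEP_PADDING selects the padding scheme used by publicEncrypt().
const { publicKey } = generateKeyPairSync("rsa", { modulusLength: 2048 });
const ciphertext = publicEncrypt(
    { key: publicKey, padding: cryptoConstants.RSA_PKCS1_OAEP_PADDING },
    Buffer.from("hello"),
);

console.log(isDir, ciphertext.length);
```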
3844
node_modules/@types/node/crypto.d.ts
generated
vendored
Executable file → Normal file
File diff suppressed because it is too large
534
node_modules/@types/node/dgram.d.ts
generated
vendored
Executable file → Normal file
@@ -1,490 +1,70 @@
/**
* The `dgram` module provides an implementation of UDP datagram sockets.
*
* ```js
* const dgram = require('dgram');
* const server = dgram.createSocket('udp4');
*
* server.on('error', (err) => {
* console.log(`server error:\n${err.stack}`);
* server.close();
* });
*
* server.on('message', (msg, rinfo) => {
* console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`);
* });
*
* server.on('listening', () => {
* const address = server.address();
* console.log(`server listening ${address.address}:${address.port}`);
* });
*
* server.bind(41234);
* // Prints: server listening 0.0.0.0:41234
* ```
* @see [source](https://github.com/nodejs/node/blob/v16.4.2/lib/dgram.js)
*/
declare module 'dgram' {
import { AddressInfo } from 'node:net';
import * as dns from 'node:dns';
import { EventEmitter, Abortable } from 'node:events';
declare module "dgram" {
import { AddressInfo } from "net";
import * as dns from "dns";
import * as events from "events";

interface RemoteInfo {
address: string;
family: 'IPv4' | 'IPv6';
port: number;
size: number;
}

interface BindOptions {
port?: number | undefined;
address?: string | undefined;
exclusive?: boolean | undefined;
fd?: number | undefined;
port?: number;
address?: string;
exclusive?: boolean;
fd?: number;
}
type SocketType = 'udp4' | 'udp6';
interface SocketOptions extends Abortable {

type SocketType = "udp4" | "udp6";

interface SocketOptions {
type: SocketType;
reuseAddr?: boolean | undefined;
reuseAddr?: boolean;
/**
* @default false
*/
ipv6Only?: boolean | undefined;
recvBufferSize?: number | undefined;
sendBufferSize?: number | undefined;
lookup?: ((hostname: string, options: dns.LookupOneOptions, callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void) => void) | undefined;
ipv6Only?: boolean;
recvBufferSize?: number;
sendBufferSize?: number;
lookup?: (hostname: string, options: dns.LookupOneOptions, callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void) => void;
}
/**
* Creates a `dgram.Socket` object. Once the socket is created, calling `socket.bind()` will instruct the socket to begin listening for datagram
* messages. When `address` and `port` are not passed to `socket.bind()` the
* method will bind the socket to the "all interfaces" address on a random port
* (it does the right thing for both `udp4` and `udp6` sockets). The bound address
* and port can be retrieved using `socket.address().address` and `socket.address().port`.
*
* If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.close()` on the socket:
*
* ```js
* const controller = new AbortController();
* const { signal } = controller;
* const server = dgram.createSocket({ type: 'udp4', signal });
* server.on('message', (msg, rinfo) => {
* console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`);
* });
* // Later, when you want to close the server.
* controller.abort();
* ```
* @since v0.11.13
* @param options Available options are:
* @param callback Attached as a listener for `'message'` events. Optional.
*/

function createSocket(type: SocketType, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket;
function createSocket(options: SocketOptions, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket;
/**
* Encapsulates the datagram functionality.
*
* New instances of `dgram.Socket` are created using {@link createSocket}.
* The `new` keyword is not to be used to create `dgram.Socket` instances.
* @since v0.1.99
*/
class Socket extends EventEmitter {
/**
* Tells the kernel to join a multicast group at the given `multicastAddress` and`multicastInterface` using the `IP_ADD_MEMBERSHIP` socket option. If the`multicastInterface` argument is not
* specified, the operating system will choose
* one interface and will add membership to it. To add membership to every
* available interface, call `addMembership` multiple times, once per interface.
*
* When called on an unbound socket, this method will implicitly bind to a random
* port, listening on all interfaces.
*
* When sharing a UDP socket across multiple `cluster` workers, the`socket.addMembership()` function must be called only once or an`EADDRINUSE` error will occur:
*
* ```js
* const cluster = require('cluster');
* const dgram = require('dgram');
* if (cluster.isPrimary) {
* cluster.fork(); // Works ok.
* cluster.fork(); // Fails with EADDRINUSE.
* } else {
* const s = dgram.createSocket('udp4');
* s.bind(1234, () => {
* s.addMembership('224.0.0.114');
* });
* }
* ```
* @since v0.6.9
*/

class Socket extends events.EventEmitter {
addMembership(multicastAddress: string, multicastInterface?: string): void;
/**
* Returns an object containing the address information for a socket.
* For UDP sockets, this object will contain `address`, `family` and `port`properties.
*
* This method throws `EBADF` if called on an unbound socket.
* @since v0.1.99
*/
address(): AddressInfo;
/**
* For UDP sockets, causes the `dgram.Socket` to listen for datagram
* messages on a named `port` and optional `address`. If `port` is not
* specified or is `0`, the operating system will attempt to bind to a
* random port. If `address` is not specified, the operating system will
* attempt to listen on all addresses. Once binding is complete, a`'listening'` event is emitted and the optional `callback` function is
* called.
*
* Specifying both a `'listening'` event listener and passing a`callback` to the `socket.bind()` method is not harmful but not very
* useful.
*
* A bound datagram socket keeps the Node.js process running to receive
* datagram messages.
*
* If binding fails, an `'error'` event is generated. In rare case (e.g.
* attempting to bind with a closed socket), an `Error` may be thrown.
*
* Example of a UDP server listening on port 41234:
*
* ```js
* const dgram = require('dgram');
* const server = dgram.createSocket('udp4');
*
* server.on('error', (err) => {
* console.log(`server error:\n${err.stack}`);
* server.close();
* });
*
* server.on('message', (msg, rinfo) => {
* console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`);
* });
*
* server.on('listening', () => {
* const address = server.address();
* console.log(`server listening ${address.address}:${address.port}`);
* });
*
* server.bind(41234);
* // Prints: server listening 0.0.0.0:41234
* ```
* @since v0.1.99
* @param callback with no parameters. Called when binding is complete.
*/
bind(port?: number, address?: string, callback?: () => void): void;
bind(port?: number, callback?: () => void): void;
bind(callback?: () => void): void;
bind(options: BindOptions, callback?: () => void): void;
/**
* Close the underlying socket and stop listening for data on it. If a callback is
* provided, it is added as a listener for the `'close'` event.
* @since v0.1.99
* @param callback Called when the socket has been closed.
*/
close(callback?: () => void): void;
/**
* Associates the `dgram.Socket` to a remote address and port. Every
* message sent by this handle is automatically sent to that destination. Also,
* the socket will only receive messages from that remote peer.
* Trying to call `connect()` on an already connected socket will result
* in an `ERR_SOCKET_DGRAM_IS_CONNECTED` exception. If `address` is not
* provided, `'127.0.0.1'` (for `udp4` sockets) or `'::1'` (for `udp6` sockets)
* will be used by default. Once the connection is complete, a `'connect'` event
* is emitted and the optional `callback` function is called. In case of failure,
* the `callback` is called or, failing this, an `'error'` event is emitted.
* @since v12.0.0
* @param callback Called when the connection is completed or on error.
*/
connect(port: number, address?: string, callback?: () => void): void;
connect(port: number, callback: () => void): void;
/**
* A synchronous function that disassociates a connected `dgram.Socket` from
* its remote address. Trying to call `disconnect()` on an unbound or already
* disconnected socket will result in an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception.
* @since v12.0.0
*/
disconnect(): void;
/**
* Instructs the kernel to leave a multicast group at `multicastAddress` using the`IP_DROP_MEMBERSHIP` socket option. This method is automatically called by the
* kernel when the socket is closed or the process terminates, so most apps will
* never have reason to call this.
*
* If `multicastInterface` is not specified, the operating system will attempt to
* drop membership on all valid interfaces.
* @since v0.6.9
*/
dropMembership(multicastAddress: string, multicastInterface?: string): void;
/**
* This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket.
* @since v8.7.0
* @return the `SO_RCVBUF` socket receive buffer size in bytes.
*/
getRecvBufferSize(): number;
/**
* This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket.
* @since v8.7.0
* @return the `SO_SNDBUF` socket send buffer size in bytes.
*/
getSendBufferSize(): number;
/**
* By default, binding a socket will cause it to block the Node.js process from
* exiting as long as the socket is open. The `socket.unref()` method can be used
* to exclude the socket from the reference counting that keeps the Node.js
* process active. The `socket.ref()` method adds the socket back to the reference
* counting and restores the default behavior.
*
* Calling `socket.ref()` multiples times will have no additional effect.
*
* The `socket.ref()` method returns a reference to the socket so calls can be
* chained.
* @since v0.9.1
*/
ref(): this;
/**
* Returns an object containing the `address`, `family`, and `port` of the remote
* endpoint. This method throws an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception
* if the socket is not connected.
* @since v12.0.0
*/
remoteAddress(): AddressInfo;
/**
* Broadcasts a datagram on the socket.
* For connectionless sockets, the destination `port` and `address` must be
* specified. Connected sockets, on the other hand, will use their associated
* remote endpoint, so the `port` and `address` arguments must not be set.
*
* The `msg` argument contains the message to be sent.
* Depending on its type, different behavior can apply. If `msg` is a `Buffer`,
* any `TypedArray` or a `DataView`,
* the `offset` and `length` specify the offset within the `Buffer` where the
* message begins and the number of bytes in the message, respectively.
* If `msg` is a `String`, then it is automatically converted to a `Buffer`with `'utf8'` encoding. With messages that
* contain multi-byte characters, `offset` and `length` will be calculated with
* respect to `byte length` and not the character position.
* If `msg` is an array, `offset` and `length` must not be specified.
*
* The `address` argument is a string. If the value of `address` is a host name,
* DNS will be used to resolve the address of the host. If `address` is not
* provided or otherwise falsy, `'127.0.0.1'` (for `udp4` sockets) or `'::1'`(for `udp6` sockets) will be used by default.
*
* If the socket has not been previously bound with a call to `bind`, the socket
* is assigned a random port number and is bound to the "all interfaces" address
* (`'0.0.0.0'` for `udp4` sockets, `'::0'` for `udp6` sockets.)
*
* An optional `callback` function may be specified to as a way of reporting
* DNS errors or for determining when it is safe to reuse the `buf` object.
* DNS lookups delay the time to send for at least one tick of the
* Node.js event loop.
*
* The only way to know for sure that the datagram has been sent is by using a`callback`. If an error occurs and a `callback` is given, the error will be
* passed as the first argument to the `callback`. If a `callback` is not given,
* the error is emitted as an `'error'` event on the `socket` object.
*
* Offset and length are optional but both _must_ be set if either are used.
* They are supported only when the first argument is a `Buffer`, a `TypedArray`,
* or a `DataView`.
*
* This method throws `ERR_SOCKET_BAD_PORT` if called on an unbound socket.
*
* Example of sending a UDP packet to a port on `localhost`;
*
* ```js
* const dgram = require('dgram');
* const message = Buffer.from('Some bytes');
* const client = dgram.createSocket('udp4');
* client.send(message, 41234, 'localhost', (err) => {
* client.close();
* });
* ```
*
* Example of sending a UDP packet composed of multiple buffers to a port on`127.0.0.1`;
*
* ```js
* const dgram = require('dgram');
* const buf1 = Buffer.from('Some ');
* const buf2 = Buffer.from('bytes');
* const client = dgram.createSocket('udp4');
* client.send([buf1, buf2], 41234, (err) => {
* client.close();
* });
* ```
*
* Sending multiple buffers might be faster or slower depending on the
* application and operating system. Run benchmarks to
* determine the optimal strategy on a case-by-case basis. Generally speaking,
* however, sending multiple buffers is faster.
*
* Example of sending a UDP packet using a socket connected to a port on`localhost`:
*
* ```js
* const dgram = require('dgram');
* const message = Buffer.from('Some bytes');
* const client = dgram.createSocket('udp4');
* client.connect(41234, 'localhost', (err) => {
* client.send(message, (err) => {
* client.close();
* });
* });
* ```
* @since v0.1.99
* @param msg Message to be sent.
* @param offset Offset in the buffer where the message starts.
* @param length Number of bytes in the message.
* @param port Destination port.
* @param address Destination host name or IP address.
* @param callback Called when the message has been sent.
*/
send(msg: string | Uint8Array | ReadonlyArray<any>, port?: number, address?: string, callback?: (error: Error | null, bytes: number) => void): void;
send(msg: string | Uint8Array | ReadonlyArray<any>, port?: number, callback?: (error: Error | null, bytes: number) => void): void;
send(msg: string | Uint8Array | ReadonlyArray<any>, callback?: (error: Error | null, bytes: number) => void): void;
send(msg: string | Uint8Array, offset: number, length: number, port?: number, address?: string, callback?: (error: Error | null, bytes: number) => void): void;
send(msg: string | Uint8Array, offset: number, length: number, port?: number, callback?: (error: Error | null, bytes: number) => void): void;
send(msg: string | Uint8Array, offset: number, length: number, callback?: (error: Error | null, bytes: number) => void): void;
/**
* Sets or clears the `SO_BROADCAST` socket option. When set to `true`, UDP
* packets may be sent to a local interface's broadcast address.
*
* This method throws `EBADF` if called on an unbound socket.
* @since v0.6.9
*/
setBroadcast(flag: boolean): void;
/**
* _All references to scope in this section are referring to[IPv6 Zone Indices](https://en.wikipedia.org/wiki/IPv6_address#Scoped_literal_IPv6_addresses), which are defined by [RFC
* 4007](https://tools.ietf.org/html/rfc4007). In string form, an IP_
* _with a scope index is written as `'IP%scope'` where scope is an interface name_
* _or interface number._
*
* Sets the default outgoing multicast interface of the socket to a chosen
* interface or back to system interface selection. The `multicastInterface` must
* be a valid string representation of an IP from the socket's family.
*
* For IPv4 sockets, this should be the IP configured for the desired physical
* interface. All packets sent to multicast on the socket will be sent on the
* interface determined by the most recent successful use of this call.
*
* For IPv6 sockets, `multicastInterface` should include a scope to indicate the
* interface as in the examples that follow. In IPv6, individual `send` calls can
* also use explicit scope in addresses, so only packets sent to a multicast
* address without specifying an explicit scope are affected by the most recent
* successful use of this call.
*
* This method throws `EBADF` if called on an unbound socket.
*
* #### Example: IPv6 outgoing multicast interface
*
* On most systems, where scope format uses the interface name:
*
* ```js
* const socket = dgram.createSocket('udp6');
*
* socket.bind(1234, () => {
* socket.setMulticastInterface('::%eth1');
* });
* ```
*
* On Windows, where scope format uses an interface number:
*
* ```js
* const socket = dgram.createSocket('udp6');
*
* socket.bind(1234, () => {
* socket.setMulticastInterface('::%2');
* });
* ```
*
* #### Example: IPv4 outgoing multicast interface
*
* All systems use an IP of the host on the desired physical interface:
*
* ```js
* const socket = dgram.createSocket('udp4');
*
* socket.bind(1234, () => {
* socket.setMulticastInterface('10.0.0.2');
* });
* ```
* @since v8.6.0
*/
setMulticastInterface(multicastInterface: string): void;
/**
* Sets or clears the `IP_MULTICAST_LOOP` socket option. When set to `true`,
* multicast packets will also be received on the local interface.
*
* This method throws `EBADF` if called on an unbound socket.
* @since v0.3.8
*/
setMulticastLoopback(flag: boolean): void;
/**
* Sets the `IP_MULTICAST_TTL` socket option. While TTL generally stands for
* "Time to Live", in this context it specifies the number of IP hops that a
* packet is allowed to travel through, specifically for multicast traffic. Each
* router or gateway that forwards a packet decrements the TTL. If the TTL is
* decremented to 0 by a router, it will not be forwarded.
*
* The `ttl` argument may be between 0 and 255\. The default on most systems is `1`.
*
* This method throws `EBADF` if called on an unbound socket.
* @since v0.3.8
*/
setMulticastTTL(ttl: number): void;
/**
* Sets the `SO_RCVBUF` socket option. Sets the maximum socket receive buffer
* in bytes.
*
* This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket.
* @since v8.7.0
*/
setRecvBufferSize(size: number): void;
/**
* Sets the `SO_SNDBUF` socket option. Sets the maximum socket send buffer
* in bytes.
*
* This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket.
* @since v8.7.0
*/
setSendBufferSize(size: number): void;
/**
* Sets the `IP_TTL` socket option. While TTL generally stands for "Time to Live",
* in this context it specifies the number of IP hops that a packet is allowed to
* travel through. Each router or gateway that forwards a packet decrements the
* TTL. If the TTL is decremented to 0 by a router, it will not be forwarded.
* Changing TTL values is typically done for network probes or when multicasting.
*
* The `ttl` argument may be between between 1 and 255\. The default on most systems
* is 64.
*
* This method throws `EBADF` if called on an unbound socket.
* @since v0.1.101
*/
setTTL(ttl: number): void;
/**
* By default, binding a socket will cause it to block the Node.js process from
* exiting as long as the socket is open. The `socket.unref()` method can be used
* to exclude the socket from the reference counting that keeps the Node.js
* process active, allowing the process to exit even if the socket is still
* listening.
*
* Calling `socket.unref()` multiple times will have no addition effect.
*
* The `socket.unref()` method returns a reference to the socket so calls can be
* chained.
* @since v0.9.1
*/
unref(): this;
/**
* Tells the kernel to join a source-specific multicast channel at the given`sourceAddress` and `groupAddress`, using the `multicastInterface` with the`IP_ADD_SOURCE_MEMBERSHIP` socket
* option. If the `multicastInterface` argument
* is not specified, the operating system will choose one interface and will add
* membership to it. To add membership to every available interface, call`socket.addSourceSpecificMembership()` multiple times, once per interface.
*
* When called on an unbound socket, this method will implicitly bind to a random
* port, listening on all interfaces.
* @since v13.1.0, v12.16.0
*/
addSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void;
/**
* Instructs the kernel to leave a source-specific multicast channel at the given`sourceAddress` and `groupAddress` using the `IP_DROP_SOURCE_MEMBERSHIP`socket option. This method is
* automatically called by the kernel when the
* socket is closed or the process terminates, so most apps will never have
* reason to call this.
*
* If `multicastInterface` is not specified, the operating system will attempt to
* drop membership on all valid interfaces.
* @since v13.1.0, v12.16.0
*/
dropSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void;

/**
* events.EventEmitter
* 1. close
@@ -494,43 +74,45 @@ declare module 'dgram' {
* 5. message
*/
addListener(event: string, listener: (...args: any[]) => void): this;
addListener(event: 'close', listener: () => void): this;
addListener(event: 'connect', listener: () => void): this;
addListener(event: 'error', listener: (err: Error) => void): this;
addListener(event: 'listening', listener: () => void): this;
addListener(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;
addListener(event: "close", listener: () => void): this;
addListener(event: "connect", listener: () => void): this;
addListener(event: "error", listener: (err: Error) => void): this;
addListener(event: "listening", listener: () => void): this;
addListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;

emit(event: string | symbol, ...args: any[]): boolean;
emit(event: 'close'): boolean;
emit(event: 'connect'): boolean;
emit(event: 'error', err: Error): boolean;
emit(event: 'listening'): boolean;
emit(event: 'message', msg: Buffer, rinfo: RemoteInfo): boolean;
emit(event: "close"): boolean;
emit(event: "connect"): boolean;
emit(event: "error", err: Error): boolean;
emit(event: "listening"): boolean;
emit(event: "message", msg: Buffer, rinfo: RemoteInfo): boolean;

on(event: string, listener: (...args: any[]) => void): this;
on(event: 'close', listener: () => void): this;
on(event: 'connect', listener: () => void): this;
on(event: 'error', listener: (err: Error) => void): this;
on(event: 'listening', listener: () => void): this;
on(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;
on(event: "close", listener: () => void): this;
on(event: "connect", listener: () => void): this;
on(event: "error", listener: (err: Error) => void): this;
on(event: "listening", listener: () => void): this;
on(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;

once(event: string, listener: (...args: any[]) => void): this;
once(event: 'close', listener: () => void): this;
once(event: 'connect', listener: () => void): this;
once(event: 'error', listener: (err: Error) => void): this;
once(event: 'listening', listener: () => void): this;
once(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;
once(event: "close", listener: () => void): this;
once(event: "connect", listener: () => void): this;
once(event: "error", listener: (err: Error) => void): this;
once(event: "listening", listener: () => void): this;
once(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;

prependListener(event: string, listener: (...args: any[]) => void): this;
prependListener(event: 'close', listener: () => void): this;
prependListener(event: 'connect', listener: () => void): this;
prependListener(event: 'error', listener: (err: Error) => void): this;
prependListener(event: 'listening', listener: () => void): this;
prependListener(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;
prependListener(event: "close", listener: () => void): this;
prependListener(event: "connect", listener: () => void): this;
prependListener(event: "error", listener: (err: Error) => void): this;
prependListener(event: "listening", listener: () => void): this;
prependListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;

prependOnceListener(event: string, listener: (...args: any[]) => void): this;
prependOnceListener(event: 'close', listener: () => void): this;
prependOnceListener(event: 'connect', listener: () => void): this;
prependOnceListener(event: 'error', listener: (err: Error) => void): this;
prependOnceListener(event: 'listening', listener: () => void): this;
prependOnceListener(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;
prependOnceListener(event: "close", listener: () => void): this;
prependOnceListener(event: "connect", listener: () => void): this;
prependOnceListener(event: "error", listener: (err: Error) => void): this;
prependOnceListener(event: "listening", listener: () => void): this;
prependOnceListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this;
}
}
declare module 'node:dgram' {
export * from 'dgram';
}
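Not part of the vendored file above: a minimal sketch of typical usage (assuming Node.js 16+), showing an echo server plus a one-shot client that exercise the `createSocket`, `bind`, `send`, `close`, and `'message'`-event declarations in the dgram.d.ts diff.

```ts
// Sketch only; uses documented dgram APIs matching the overloads declared above.
import * as dgram from "node:dgram";

const server = dgram.createSocket("udp4");

server.on("message", (msg, rinfo) => {
    // Echo each datagram back to its sender.
    server.send(msg, rinfo.port, rinfo.address);
});

server.bind(41234, () => {
    const client = dgram.createSocket("udp4");
    client.on("message", (msg) => {
        console.log(`echoed: ${msg}`);
        client.close();
        server.close();
    });
    client.send(Buffer.from("ping"), 41234, "127.0.0.1", (err) => {
        if (err) console.error(err);
    });
});
```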
Some files were not shown because too many files have changed in this diff