Mirror of https://github.com/github/codeql-action.git (synced 2025-12-07 00:08:06 +08:00)

Compare commits: v1.1.24 ... edoardo/be (46 commits)
4e201b621d
3400e51bc8
74740eef3d
1ec8ea99ee
2466f0ce2c
a711c7623d
39064e0f9b
28c63d131f
a4e4529299
cc4ee05a07
1f0700d1c0
cab46c529f
e37b0d6470
314ede696b
b96c7546c1
4c8f13758e
b98b2def63
a643eb3621
6188cbdeb9
4706007119
86f3159a69
d1e2e02bee
ff5ca122ed
32ca2cf500
b2fc1e178e
8a893ddf18
93ba53f2de
1fa5d72846
417059fdb2
ca8a78d5f3
2264307214
3f97671248
c2c7bba5f7
1309aafb7d
038242a7f0
70509c3884
e1ce6e3115
5ffcfe95cc
aaca8193b1
2e9fbe39e1
fb28913d5c
e0ef82e596
9f79e5fbcf
b15cc0075a
82495d8d86
4f104676ac
36 .github/update-release-branch.py vendored

@@ -67,7 +67,7 @@ def open_pr(
body.append('Merging ' + source_branch_short_sha + ' into ' + target_branch)
body.append('')
body.append('Conductor for this PR is @' + conductor)
body.append(f'Conductor for this PR is @{conductor}.')
# List all PRs merged
if len(pull_requests) > 0:
@@ -75,32 +75,40 @@ def open_pr(
body.append('Contains the following pull requests:')
for pr in pull_requests:
merger = get_merger_of_pr(repo, pr)
body.append('- #' + str(pr.number) + ' - ' + pr.title +' (@' + merger + ')')
body.append(f'- #{pr.number} (@{merger})')
# List all commits not part of a PR
if len(commits_without_pull_requests) > 0:
body.append('')
body.append('Contains the following commits not from a pull request:')
for commit in commits_without_pull_requests:
author_description = ' (@' + commit.author.login + ')' if commit.author is not None else ''
body.append('- ' + commit.sha + ' - ' + get_truncated_commit_message(commit) + author_description)
author_description = f' (@{commit.author.login})' if commit.author is not None else ''
body.append(f'- {commit.sha} - {get_truncated_commit_message(commit)}{author_description}')
body.append('')
body.append('Please review the following:')
body.append('Please do the following:')
if len(conflicted_files) > 0:
body.append(' - [ ] The `package.json` file contains the correct version.')
body.append(' - [ ] You have added commits to this branch that resolve the merge conflicts ' +
body.append(' - [ ] Ensure `package.json` file contains the correct version.')
body.append(' - [ ] Add commits to this branch to resolve the merge conflicts ' +
'in the following files:')
body.extend([f' - [ ] `{file}`' for file in conflicted_files])
body.append(' - [ ] Another maintainer has reviewed the additional commits you added to this ' +
body.append(' - [ ] Ensure another maintainer has reviewed the additional commits you added to this ' +
'branch to resolve the merge conflicts.')
body.append(' - [ ] The CHANGELOG displays the correct version and date.')
body.append(' - [ ] The CHANGELOG includes all relevant, user-facing changes since the last release.')
body.append(' - [ ] There are no unexpected commits being merged into the ' + target_branch + ' branch.')
body.append(' - [ ] The docs team is aware of any documentation changes that need to be released.')
body.append(' - [ ] Ensure the CHANGELOG displays the correct version and date.')
body.append(' - [ ] Ensure the CHANGELOG includes all relevant, user-facing changes since the last release.')
body.append(' - [ ] Check that there are not any unexpected commits being merged into the ' + target_branch + ' branch.')
body.append(' - [ ] Ensure the docs team is aware of any documentation changes that need to be released.')
if not is_v2_release:
body.append(' - [ ] Remove and re-add the "Update dependencies" label to the PR to trigger just this workflow.')
body.append(' - [ ] Wait for the "Update dependencies" workflow to push a commit updating the dependencies.')
body.append(' - [ ] Mark the PR as ready for review to trigger the full set of PR checks.')
body.append(' - [ ] Approve and merge this PR.')
if is_v2_release:
body.append(' - [ ] The mergeback PR is merged back into ' + source_branch + ' after this PR is merged.')
body.append(' - [ ] The v1 release PR is merged after this PR is merged.')
body.append(' - [ ] Merge the mergeback PR that will automatically be created once this PR is merged.')
body.append(' - [ ] Merge the v1 release PR that will automatically be created once this PR is merged.')
title = 'Merge ' + source_branch + ' into ' + target_branch
1 .github/workflows/__go-custom-queries.yml generated vendored

@@ -93,4 +93,5 @@ jobs:
env:
TEST_MODE: true
env:
DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
97 .github/workflows/__unset-environment.yml generated vendored

@@ -1,97 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.
name: PR Check - Test unsetting environment variables
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
unset-environment:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
- os: ubuntu-latest
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: ubuntu-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
name: Test unsetting environment variables
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
db-location: ${{ runner.temp }}/customDbLocation
tools: ${{ steps.prepare-test.outputs.tools-url }}
env:
TEST_MODE: true
- name: Build code
shell: bash
run: env -i PATH="$PATH" HOME="$HOME" ./build.sh
- uses: ./../action/analyze
id: analysis
env:
TEST_MODE: true
- shell: bash
run: |
CPP_DB=${{ fromJson(steps.analysis.outputs.db-locations).cpp }}
if [[ ! -d $CPP_DB ]] || [[ ! $CPP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for CPP, or created it in the wrong location."
exit 1
fi
CSHARP_DB=${{ fromJson(steps.analysis.outputs.db-locations).csharp }}
if [[ ! -d $CSHARP_DB ]] || [[ ! $CSHARP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for C Sharp, or created it in the wrong location."
exit 1
fi
GO_DB=${{ fromJson(steps.analysis.outputs.db-locations).go }}
if [[ ! -d $GO_DB ]] || [[ ! $GO_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Go, or created it in the wrong location."
exit 1
fi
JAVA_DB=${{ fromJson(steps.analysis.outputs.db-locations).java }}
if [[ ! -d $JAVA_DB ]] || [[ ! $JAVA_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Java, or created it in the wrong location."
exit 1
fi
JAVASCRIPT_DB=${{ fromJson(steps.analysis.outputs.db-locations).javascript }}
if [[ ! -d $JAVASCRIPT_DB ]] || [[ ! $JAVASCRIPT_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Javascript, or created it in the wrong location."
exit 1
fi
PYTHON_DB=${{ fromJson(steps.analysis.outputs.db-locations).python }}
if [[ ! -d $PYTHON_DB ]] || [[ ! $PYTHON_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Python, or created it in the wrong location."
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
13 .github/workflows/post-release-mergeback.yml vendored

@@ -114,7 +114,17 @@ jobs:
run: |
set -exu
pr_title="Mergeback ${VERSION} ${HEAD_BRANCH} into ${BASE_BRANCH}"
pr_body="Updates version and changelog."
pr_body=$(cat << EOF
This PR bumps the version number and updates the changelog after the ${VERSION} release.
Please do the following:
- [ ] Remove and re-add the "Update dependencies" label to the PR to trigger just this workflow.
- [ ] Wait for the "Update dependencies" workflow to push a commit updating the dependencies.
- [ ] Mark the PR as ready for review to trigger the full set of PR checks.
- [ ] Approve and merge the PR.
EOF
)
# Update the version number ready for the next release
npm version patch --no-git-tag-version
@@ -134,4 +144,5 @@ jobs:
--title "${pr_title}" \
--label "Update dependencies" \
--body "${pr_body}" \
--assignee "${GITHUB_ACTOR}" \
--draft
9 .github/workflows/python-deps.yml vendored

@@ -26,7 +26,7 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest]
os: [ubuntu-latest, ubuntu-22.04, macos-latest]
python_deps_type: [pipenv, poetry, requirements, setup_py]
python_version: [2, 3]
exclude:
@@ -36,6 +36,9 @@ jobs:
# Python2 and pipenv are not supported since pipenv v2021.11.5
- python_version: 2
python_deps_type: pipenv
# Python2 is not available on ubuntu-22.04 by default -- see https://github.com/github/codeql-action/pull/1257
- python_version: 2
os: ubuntu-22.04
env:
@@ -63,6 +66,7 @@ jobs:
case ${{ matrix.os }} in
ubuntu-latest*) basePath="/opt";;
ubuntu-22.04*) basePath="/opt";;
macos-latest*) basePath="/Users/runner";;
esac
echo ${basePath}
@@ -86,7 +90,7 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest]
os: [ubuntu-latest, ubuntu-22.04, macos-latest]
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
@@ -109,6 +113,7 @@ jobs:
case ${{ matrix.os }} in
ubuntu-latest*) basePath="/opt";;
ubuntu-22.04*) basePath="/opt";;
macos-latest*) basePath="/Users/runner";;
esac
echo ${basePath}
95 .github/workflows/unset-environment-new-cli.yml vendored Normal file

@@ -0,0 +1,95 @@
# See `unset-environment-old-cli.yml` for reasoning behind the separate tests.
name: PR Check - Test unsetting environment variables for CLI version >= 2.5.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
unset-environment:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210809
- os: ubuntu-latest
version: cached
- os: ubuntu-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
name: Test unsetting environment variables
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
db-location: ${{ runner.temp }}/customDbLocation
tools: ${{ steps.prepare-test.outputs.tools-url }}
env:
TEST_MODE: true
- name: Build code
shell: bash
run: env -i PATH="$PATH" HOME="$HOME" ./build.sh
- uses: ./../action/analyze
id: analysis
env:
TEST_MODE: true
- shell: bash
run: |
CPP_DB="${{ fromJson(steps.analysis.outputs.db-locations).cpp }}"
if [[ ! -d "$CPP_DB" ]] || [[ ! "$CPP_DB" == "${RUNNER_TEMP}/customDbLocation/cpp" ]]; then
echo "::error::Did not create a database for CPP, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/cpp' but actual was '${CPP_DB}'"
exit 1
fi
CSHARP_DB="${{ fromJson(steps.analysis.outputs.db-locations).csharp }}"
if [[ ! -d "$CSHARP_DB" ]] || [[ ! "$CSHARP_DB" == "${RUNNER_TEMP}/customDbLocation/csharp" ]]; then
echo "::error::Did not create a database for C Sharp, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/csharp' but actual was '${CSHARP_DB}'"
exit 1
fi
GO_DB="${{ fromJson(steps.analysis.outputs.db-locations).go }}"
if [[ ! -d "$GO_DB" ]] || [[ ! "$GO_DB" == "${RUNNER_TEMP}/customDbLocation/go" ]]; then
echo "::error::Did not create a database for Go, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/go' but actual was '${GO_DB}'"
exit 1
fi
JAVA_DB="${{ fromJson(steps.analysis.outputs.db-locations).java }}"
if [[ ! -d "$JAVA_DB" ]] || [[ ! "$JAVA_DB" == "${RUNNER_TEMP}/customDbLocation/java" ]]; then
echo "::error::Did not create a database for Java, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/java' but actual was '${JAVA_DB}'"
exit 1
fi
JAVASCRIPT_DB="${{ fromJson(steps.analysis.outputs.db-locations).javascript }}"
if [[ ! -d "$JAVASCRIPT_DB" ]] || [[ ! "$JAVASCRIPT_DB" == "${RUNNER_TEMP}/customDbLocation/javascript" ]]; then
echo "::error::Did not create a database for Javascript, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/javascript' but actual was '${JAVASCRIPT_DB}'"
exit 1
fi
PYTHON_DB="${{ fromJson(steps.analysis.outputs.db-locations).python }}"
if [[ ! -d "$PYTHON_DB" ]] || [[ ! "$PYTHON_DB" == "${RUNNER_TEMP}/customDbLocation/python" ]]; then
echo "::error::Did not create a database for Python, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/python' but actual was '${PYTHON_DB}'"
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
89 .github/workflows/unset-environment-old-cli.yml vendored Normal file

@@ -0,0 +1,89 @@
# There was a bug, fixed in CLI v2.5.1, that didn't propagate environment
# variables that the Java tracer needed. Here we test all languages
# except Java for these CLI versions. In `unset-environment-new-cli.yml`
# we test all languages for recent CLI versions.
name: PR Check - Test unsetting environment variables for CLI version < 2.5.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v1
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
unset-environment:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20210308
- os: ubuntu-latest
version: stable-20210319
name: Test unsetting environment variables
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
languages: csharp,cpp,go,javascript,python
db-location: ${{ runner.temp }}/customDbLocation
tools: ${{ steps.prepare-test.outputs.tools-url }}
env:
TEST_MODE: true
- name: Build code
shell: bash
run: env -i PATH="$PATH" HOME="$HOME" ./build.sh
- uses: ./../action/analyze
id: analysis
env:
TEST_MODE: true
- shell: bash
run: |
CPP_DB="${{ fromJson(steps.analysis.outputs.db-locations).cpp }}"
if [[ ! -d "$CPP_DB" ]] || [[ ! "$CPP_DB" == "${RUNNER_TEMP}/customDbLocation/cpp" ]]; then
echo "::error::Did not create a database for CPP, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/cpp' but actual was '${CPP_DB}'"
exit 1
fi
CSHARP_DB="${{ fromJson(steps.analysis.outputs.db-locations).csharp }}"
if [[ ! -d "$CSHARP_DB" ]] || [[ ! "$CSHARP_DB" == "${RUNNER_TEMP}/customDbLocation/csharp" ]]; then
echo "::error::Did not create a database for C Sharp, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/csharp' but actual was '${CSHARP_DB}'"
exit 1
fi
GO_DB="${{ fromJson(steps.analysis.outputs.db-locations).go }}"
if [[ ! -d "$GO_DB" ]] || [[ ! "$GO_DB" == "${RUNNER_TEMP}/customDbLocation/go" ]]; then
echo "::error::Did not create a database for Go, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/go' but actual was '${GO_DB}'"
exit 1
fi
JAVASCRIPT_DB="${{ fromJson(steps.analysis.outputs.db-locations).javascript }}"
if [[ ! -d "$JAVASCRIPT_DB" ]] || [[ ! "$JAVASCRIPT_DB" == "${RUNNER_TEMP}/customDbLocation/javascript" ]]; then
echo "::error::Did not create a database for Javascript, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/javascript' but actual was '${JAVASCRIPT_DB}'"
exit 1
fi
PYTHON_DB="${{ fromJson(steps.analysis.outputs.db-locations).python }}"
if [[ ! -d "$PYTHON_DB" ]] || [[ ! "$PYTHON_DB" == "${RUNNER_TEMP}/customDbLocation/python" ]]; then
echo "::error::Did not create a database for Python, or created it in the wrong location." \
"Expected location was '${RUNNER_TEMP}/customDbLocation/python' but actual was '${PYTHON_DB}'"
exit 1
fi
env:
INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
@@ -1,5 +1,14 @@
# CodeQL Action Changelog
## [UNRELEASED]
- Update default CodeQL bundle version to 2.11.0. [#1267](https://github.com/github/codeql-action/pull/1267)
## 2.1.25 - 21 Sep 2022
- We will soon be rolling out a feature of the CodeQL Action that stores some information used to make future runs faster in the GitHub Actions cache. Initially, this will only be enabled on JavaScript repositories, but we plan to add more languages to this soon. The new feature can be disabled by passing the `trap-caching: false` option to your workflow's `init` step, for example if you are already using the GitHub Actions cache for a different purpose and are near the storage limit for it.
- Add support for Python automatic dependency installation with Poetry 1.2 [#1258](https://github.com/github/codeql-action/pull/1258).
## 2.1.24 - 16 Sep 2022
No user facing changes.
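The 2.1.25 changelog entry above notes that the new TRAP caching behaviour can be turned off through the init step. As a minimal illustrative sketch only, not part of this diff, a workflow could opt out as follows; the job name, language list, and action versions shown here are assumptions:

# Illustrative sketch: opting out of TRAP caching, per the 2.1.25 changelog entry above.
# Job name, language, and action versions are assumptions, not taken from this diff.
jobs:
  analyze:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: github/codeql-action/init@v2
        with:
          languages: javascript
          trap-caching: false
      - uses: github/codeql-action/analyze@v2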
2 lib/actions-util.js generated

@@ -452,7 +452,7 @@ async function getRef() {
// in actions/checkout@v1 this may not be true as it checks out the repository
// using GITHUB_REF. There is a subtle race condition where
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
// git git-parse GITHUB_REF == git rev-parse HEAD instead.
// git rev-parse GITHUB_REF == git rev-parse HEAD instead.
const hasChangedRef = sha !== head &&
(await (0, exports.getCommitOid)(checkoutPath, ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !== head;
if (hasChangedRef) {
1 lib/analyze-action-env.test.js generated

@@ -52,6 +52,7 @@ const util = __importStar(require("./util"));
gitHubVersion,
languages: [],
packs: [],
trapCaches: {},
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("token").returns("fake-token");
@@ -1 +1 @@
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
1 lib/analyze-action-input.test.js generated

@@ -52,6 +52,7 @@ const util = __importStar(require("./util"));
gitHubVersion,
languages: [],
packs: [],
trapCaches: {},
});
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("token").returns("fake-token");
@@ -1 +1 @@
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;SACuB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
2 lib/analyze-action.js generated

@@ -163,7 +163,7 @@ async function run() {
await runAutobuildIfLegacyGoWorkflow(config, featureFlags, logger);
dbCreationTimings = await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger, featureFlags);
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger);
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger, featureFlags);
}
if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
await (0, analyze_1.runCleanup)(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);
File diff suppressed because one or more lines are too long
4 lib/analyze.js generated

@@ -122,7 +122,7 @@ async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger,
};
}
// Runs queries and creates sarif files in the given folder
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger) {
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger, featureFlags) {
const statusReport = {};
let locPromise = Promise.resolve({});
const cliCanCountBaseline = await cliCanCountLoC();
@@ -147,7 +147,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
}
try {
if (await util.useCodeScanningConfigInCli(codeql)) {
if (await util.useCodeScanningConfigInCli(codeql, featureFlags)) {
// If we are using the codescanning config in the CLI,
// much of the work needed to generate the query suites
// is done in the CLI. We just need to make a single
File diff suppressed because one or more lines are too long
5 lib/analyze.test.js generated

@@ -30,6 +30,7 @@ const sinon = __importStar(require("sinon"));
const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql");
const count = __importStar(require("./count-loc"));
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
@@ -131,7 +132,7 @@ const util = __importStar(require("./util"));
builtin: ["foo.ql"],
custom: [],
};
const builtinStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true));
const builtinStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, feature_flags_1.createFeatureFlags)([]));
const hasPacks = language in packs;
const statusReportKeys = Object.keys(builtinStatusReport).sort();
if (hasPacks) {
@@ -157,7 +158,7 @@ const util = __importStar(require("./util"));
},
],
};
const customStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true));
const customStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, feature_flags_1.createFeatureFlags)([]));
t.deepEqual(Object.keys(customStatusReport).length, 2);
t.true(`analyze_custom_queries_${language}_duration_ms` in customStatusReport);
const expectedSearchPathsUsed = hasPacks
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
{ "maximumVersion": "3.7", "minimumVersion": "3.2" }
{ "maximumVersion": "3.7", "minimumVersion": "3.3" }
6 lib/codeql.js generated

@@ -515,7 +515,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
extraArgs.push("--no-internal-use-lua-tracing");
}
}
const configLocation = await generateCodescanningConfig(codeql, config);
const configLocation = await generateCodescanningConfig(codeql, config, featureFlags);
if (configLocation) {
extraArgs.push(`--codescanning-config=${configLocation}`);
}
@@ -875,9 +875,9 @@ async function runTool(cmd, args = []) {
* @param config The configuration to use.
* @returns the path to the generated user configuration file.
*/
async function generateCodescanningConfig(codeql, config) {
async function generateCodescanningConfig(codeql, config, featureFlags) {
var _a;
if (!(await util.useCodeScanningConfigInCli(codeql))) {
if (!(await util.useCodeScanningConfigInCli(codeql, featureFlags))) {
return;
}
const configLocation = path.resolve(config.tempDir, "user-config.yaml");
File diff suppressed because one or more lines are too long
2 lib/config-utils.js generated

@@ -908,7 +908,7 @@ async function initConfig(languagesInput, queriesInput, packsInput, registriesIn
// When using the codescanning config in the CLI, pack downloads
// happen in the CLI during the `database init` command, so no need
// to download them here.
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL))) {
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, featureFlags))) {
const registries = parseRegistries(registriesInput);
await downloadPacks(codeQL, config.languages, config.packs, registries, apiDetails, config.tempDir, logger);
}
File diff suppressed because one or more lines are too long
@@ -1,3 +1,3 @@
{
"bundleVersion": "codeql-bundle-20220908"
"bundleVersion": "codeql-bundle-20220923"
}
1 lib/feature-flags.js generated

@@ -28,6 +28,7 @@ var FeatureFlag;
FeatureFlag["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
FeatureFlag["TrapCachingEnabled"] = "trap_caching_enabled";
FeatureFlag["GolangExtractionReconciliationEnabled"] = "golang_extraction_reconciliation_enabled";
FeatureFlag["CliConfigFileEnabled"] = "cli_config_file_enabled";
})(FeatureFlag = exports.FeatureFlag || (exports.FeatureFlag = {}));
class GitHubFeatureFlags {
constructor(gitHubVersion, apiDetails, repositoryNwo, logger) {
@@ -1 +1 @@
{"version":3,"file":"feature-flags.js","sourceRoot":"","sources":["../src/feature-flags.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,6CAA8D;AAG9D,6CAA+B;AAM/B,IAAY,WAKX;AALD,WAAY,WAAW;IACrB,kEAAmD,CAAA;IACnD,qEAAsD,CAAA;IACtD,0DAA2C,CAAA;IAC3C,iGAAkF,CAAA;AACpF,CAAC,EALW,WAAW,GAAX,mBAAW,KAAX,mBAAW,QAKtB;AAUD,MAAa,kBAAkB;IAG7B,YACU,aAAiC,EACjC,UAA4B,EAC5B,aAA4B,EAC5B,MAAc;QAHd,kBAAa,GAAb,aAAa,CAAoB;QACjC,eAAU,GAAV,UAAU,CAAkB;QAC5B,kBAAa,GAAb,aAAa,CAAe;QAC5B,WAAM,GAAN,MAAM,CAAQ;IACrB,CAAC;IAEJ,KAAK,CAAC,QAAQ,CAAC,IAAiB;QAC9B,oDAAoD;QACpD,IAAI,IAAI,KAAK,WAAW,CAAC,sBAAsB,IAAI,IAAI,CAAC,YAAY,EAAE,EAAE;YACtE,OAAO,KAAK,CAAC;SACd;QAED,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;QAC7C,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,qCAAqC,IAAI,4BAA4B,CACtE,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,MAAM,SAAS,GAAG,QAAQ,CAAC,IAAI,CAAC,CAAC;QACjC,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,iBAAiB,IAAI,uDAAuD,CAC7E,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IAEO,KAAK,CAAC,cAAc;QAC1B,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE;YACjC,iDAAiD;YACjD,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;gBACzD,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,8DAA8D,CAC/D,CAAC;gBACF,OAAO,EAAE,CAAC;aACX;YACD,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAC7C,IAAI;gBACF,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,OAAO,CACnC,8DAA8D,EAC9D;oBACE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;oBAC/B,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI;iBAC9B,CACF,CAAC;gBACF,OAAO,QAAQ,CAAC,IAAI,CAAC;aACtB;YAAC,OAAO,CAAC,EAAE;gBACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;oBAC3C,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,gGAAgG;wBAC9F,oEAAoE;wBACpE,qFAAqF;wBACrF,kFAAkF,CAAC,EAAE,CACxF,CAAC;iBACH;qBAAM;oBACL,uFAAuF;oBACvF,mFAAmF;oBACnF,2FAA2F;oBAC3F,qBAAqB;oBACrB,MAAM,IAAI,KAAK,CACb,4DAA4D,CAAC,EAAE,CAChE,CAAC;iBACH;aACF;QACH,CAAC,CAAC;QAEF,MAAM,WAAW,GAAG,IAAI,CAAC,iBAAiB,IAAI,CAAC,MAAM,eAAe,EAAE,CAAC,CAAC;QACxE,IAAI,CAAC,iBAAiB,GAAG,WAAW,CAAC;QACrC,OAAO,WAAW,CAAC;IACrB,CAAC;CACF;AA5ED,gDA4EC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,YAA2B;IAC5D,OAAO;QACL,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE;YACvB,OAAO,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACrC,CAAC;KACF,CAAC;AACJ,CAAC;AAND,gDAMC"}
{"version":3,"file":"feature-flags.js","sourceRoot":"","sources":["../src/feature-flags.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,6CAA8D;AAG9D,6CAA+B;AAM/B,IAAY,WAMX;AAND,WAAY,WAAW;IACrB,kEAAmD,CAAA;IACnD,qEAAsD,CAAA;IACtD,0DAA2C,CAAA;IAC3C,iGAAkF,CAAA;IAClF,+DAAgD,CAAA;AAClD,CAAC,EANW,WAAW,GAAX,mBAAW,KAAX,mBAAW,QAMtB;AAUD,MAAa,kBAAkB;IAG7B,YACU,aAAiC,EACjC,UAA4B,EAC5B,aAA4B,EAC5B,MAAc;QAHd,kBAAa,GAAb,aAAa,CAAoB;QACjC,eAAU,GAAV,UAAU,CAAkB;QAC5B,kBAAa,GAAb,aAAa,CAAe;QAC5B,WAAM,GAAN,MAAM,CAAQ;IACrB,CAAC;IAEJ,KAAK,CAAC,QAAQ,CAAC,IAAiB;QAC9B,oDAAoD;QACpD,IAAI,IAAI,KAAK,WAAW,CAAC,sBAAsB,IAAI,IAAI,CAAC,YAAY,EAAE,EAAE;YACtE,OAAO,KAAK,CAAC;SACd;QAED,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;QAC7C,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,qCAAqC,IAAI,4BAA4B,CACtE,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,MAAM,SAAS,GAAG,QAAQ,CAAC,IAAI,CAAC,CAAC;QACjC,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,iBAAiB,IAAI,uDAAuD,CAC7E,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IAEO,KAAK,CAAC,cAAc;QAC1B,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE;YACjC,iDAAiD;YACjD,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;gBACzD,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,8DAA8D,CAC/D,CAAC;gBACF,OAAO,EAAE,CAAC;aACX;YACD,MAAM,MAAM,GAAG,IAAA,yBAAY,EAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAC7C,IAAI;gBACF,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,OAAO,CACnC,8DAA8D,EAC9D;oBACE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;oBAC/B,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI;iBAC9B,CACF,CAAC;gBACF,OAAO,QAAQ,CAAC,IAAI,CAAC;aACtB;YAAC,OAAO,CAAC,EAAE;gBACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;oBAC3C,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,gGAAgG;wBAC9F,oEAAoE;wBACpE,qFAAqF;wBACrF,kFAAkF,CAAC,EAAE,CACxF,CAAC;iBACH;qBAAM;oBACL,uFAAuF;oBACvF,mFAAmF;oBACnF,2FAA2F;oBAC3F,qBAAqB;oBACrB,MAAM,IAAI,KAAK,CACb,4DAA4D,CAAC,EAAE,CAChE,CAAC;iBACH;aACF;QACH,CAAC,CAAC;QAEF,MAAM,WAAW,GAAG,IAAI,CAAC,iBAAiB,IAAI,CAAC,MAAM,eAAe,EAAE,CAAC,CAAC;QACxE,IAAI,CAAC,iBAAiB,GAAG,WAAW,CAAC;QACrC,OAAO,WAAW,CAAC;IACrB,CAAC;CACF;AA5ED,gDA4EC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,YAA2B;IAC5D,OAAO;QACL,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE;YACvB,OAAO,YAAY,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACrC,CAAC;KACF,CAAC;AACJ,CAAC;AAND,gDAMC"}
2 lib/runner.js generated

@@ -295,7 +295,7 @@ program
const threads = (0, util_1.getThreadsFlag)(cmd.threads || initEnv["CODEQL_THREADS"], logger);
const memory = (0, util_1.getMemoryFlag)(cmd.ram || initEnv["CODEQL_RAM"]);
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger, (0, feature_flags_1.createFeatureFlags)([]));
await (0, analyze_1.runQueries)(outputDir, memory, (0, util_1.getAddSnippetsFlag)(cmd.addSnippets), threads, cmd.category, config, logger);
await (0, analyze_1.runQueries)(outputDir, memory, (0, util_1.getAddSnippetsFlag)(cmd.addSnippets), threads, cmd.category, config, logger, (0, feature_flags_1.createFeatureFlags)([]));
if (!cmd.upload) {
logger.info("Not uploading results");
return;
File diff suppressed because one or more lines are too long
33 lib/trap-caching.js generated

@@ -18,19 +18,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getTotalCacheSize = exports.getLanguagesSupportingCaching = exports.uploadTrapCaches = exports.downloadTrapCaches = exports.getTrapCachingExtractorConfigArgsForLang = exports.getTrapCachingExtractorConfigArgs = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const util_1 = require("util");
const cache = __importStar(require("@actions/cache"));
const get_folder_size_1 = __importDefault(require("get-folder-size"));
const actionsUtil = __importStar(require("./actions-util"));
const codeql_1 = require("./codeql");
const util_2 = require("./util");
const util_1 = require("./util");
// This constant should be bumped if we make a breaking change
// to how the CodeQL Action stores or retrieves the TRAP cache,
// and will invalidate previous caches. We don't need to bump
@@ -39,6 +34,9 @@ const util_2 = require("./util");
const CACHE_VERSION = 1;
// This constant sets the size of each TRAP cache in megabytes.
const CACHE_SIZE_MB = 1024;
// This constant sets the minimum size in megabytes of a TRAP
// cache for us to consider it worth uploading.
const MINIMUM_CACHE_MB_TO_UPLOAD = 10;
async function getTrapCachingExtractorConfigArgs(config) {
const result = [];
for (const language of config.languages)
@@ -126,6 +124,15 @@ async function uploadTrapCaches(codeql, config, logger) {
const cacheDir = config.trapCaches[language];
if (cacheDir === undefined)
continue;
const trapFolderSize = await (0, util_1.tryGetFolderBytes)(cacheDir, logger);
if (trapFolderSize === undefined) {
logger.info(`Skipping upload of TRAP cache for ${language} as we couldn't determine its size`);
continue;
}
if (trapFolderSize < MINIMUM_CACHE_MB_TO_UPLOAD * 1048576) {
logger.info(`Skipping upload of TRAP cache for ${language} as it is too small`);
continue;
}
const key = await cacheKey(codeql, language, process.env.GITHUB_SHA || "unknown");
logger.info(`Uploading TRAP cache to Actions cache with key ${key}`);
toAwait.push(cache.saveCache([cacheDir], key));
@@ -137,7 +144,7 @@ exports.uploadTrapCaches = uploadTrapCaches;
async function getLanguagesSupportingCaching(codeql, languages, logger) {
var _a, _b, _c, _d;
const result = [];
if (!(await (0, util_2.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES)))
if (!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES)))
return result;
const resolveResult = await codeql.betterResolveLanguages();
outer: for (const lang of languages) {
@@ -168,16 +175,8 @@ async function getLanguagesSupportingCaching(codeql, languages, logger) {
}
exports.getLanguagesSupportingCaching = getLanguagesSupportingCaching;
async function getTotalCacheSize(trapCaches, logger) {
try {
const sizes = await Promise.all(Object.values(trapCaches).map(async (cacheDir) => {
return (0, util_1.promisify)(get_folder_size_1.default)(cacheDir);
}));
return sizes.reduce((a, b) => a + b, 0);
}
catch (e) {
logger.warning(`Encountered an error while getting TRAP cache size: ${e}`);
return 0;
}
const sizes = await Promise.all(Object.values(trapCaches).map((cacheDir) => (0, util_1.tryGetFolderBytes)(cacheDir, logger)));
return sizes.map((a) => a || 0).reduce((a, b) => a + b, 0);
}
exports.getTotalCacheSize = getTotalCacheSize;
async function cacheKey(codeql, language, baseSha) {
File diff suppressed because one or more lines are too long
1 lib/trap-caching.test.js generated

@@ -164,6 +164,7 @@ function getTestConfigWithTempDir(tmpDir) {
const loggedMessages = [];
const logger = (0, testing_utils_1.getRecordingLogger)(loggedMessages);
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
sinon.stub(util, "tryGetFolderBytes").resolves(999999999);
const stubSave = sinon.stub(cache, "saveCache");
process.env.GITHUB_SHA = "somesha";
await (0, trap_caching_1.uploadTrapCaches)(stubCodeql, testConfigWithoutTmpDir, logger);
File diff suppressed because one or more lines are too long
82 lib/util.js generated

@@ -22,12 +22,14 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.isGoExtractionReconciliationEnabled = exports.listFolder = exports.doesDirectoryExist = exports.useCodeScanningConfigInCli = exports.isInTestMode = exports.checkActionVersion = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isGitHubGhesVersionBelow = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DID_AUTOBUILD_GO_ENV_VAR_NAME = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
exports.tryGetFolderBytes = exports.isGoExtractionReconciliationEnabled = exports.listFolder = exports.doesDirectoryExist = exports.useCodeScanningConfigInCli = exports.isInTestMode = exports.checkActionVersion = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isGitHubGhesVersionBelow = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.EnvVar = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getTotalMemoryBytes = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DID_AUTOBUILD_GO_ENV_VAR_NAME = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const util_1 = require("util");
const core = __importStar(require("@actions/core"));
const del_1 = __importDefault(require("del"));
const get_folder_size_1 = __importDefault(require("get-folder-size"));
const semver = __importStar(require("semver"));
const api = __importStar(require("./api-client"));
const api_client_1 = require("./api-client");
@@ -128,7 +130,7 @@ function getMemoryFlagValue(userInput) {
}
}
else {
const totalMemoryBytes = os.totalmem();
const totalMemoryBytes = getTotalMemoryBytes();
const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
const reservedMemoryMegaBytes = getSystemReservedMemoryMegaBytes();
memoryToUseMegaBytes = totalMemoryMegaBytes - reservedMemoryMegaBytes;
@@ -136,6 +138,41 @@ function getMemoryFlagValue(userInput) {
return Math.floor(memoryToUseMegaBytes);
}
exports.getMemoryFlagValue = getMemoryFlagValue;
function getTotalMemoryBytes() {
const nodeReportedMemory = os.totalmem();
console.log(`Node reported ${nodeReportedMemory} bytes of memory.`);
if (process.platform === "win32") {
console.log("On Windows, so just returning the memory Node reported.");
return nodeReportedMemory;
}
let lowestMemorySeen = nodeReportedMemory;
try {
const dockerMemoryLimit = parseInt(fs.readFileSync("/sys/fs/cgroup/memory/memory.limit_in_bytes", "utf8"));
console.log(`Docker set a limit of ${dockerMemoryLimit} bytes of memory.`);
lowestMemorySeen = Math.min(lowestMemorySeen, dockerMemoryLimit);
}
catch (err) {
console.error(err);
}
try {
const memoryInfo = fs.readFileSync("/proc/meminfo", "utf8").split("\n");
const relevantLine = /^\s*MemTotal:\s*(\d+)\s*kB\s*$/;
for (const line of memoryInfo) {
const match = relevantLine.exec(line);
if (match) {
const memoryFromMemoryInfo = parseInt(match[1]) * 1024;
console.log(`Found total memory of ${memoryFromMemoryInfo} in memory info.`);
lowestMemorySeen = Math.min(lowestMemorySeen, memoryFromMemoryInfo);
break;
}
}
}
catch (err) {
console.error(err);
}
return lowestMemorySeen;
}
exports.getTotalMemoryBytes = getTotalMemoryBytes;
/**
* Get the codeql `--ram` flag as configured by the `ram` input. If no value was
* specified, the total available memory will be used minus a threshold
@@ -428,7 +465,7 @@ var EnvVar;
* the codeql-config file to the codeql CLI to be processed there.
*/
EnvVar["CODEQL_PASS_CONFIG_TO_CLI"] = "CODEQL_PASS_CONFIG_TO_CLI";
})(EnvVar || (EnvVar = {}));
})(EnvVar = exports.EnvVar || (exports.EnvVar = {}));
const exportVar = (mode, name, value) => {
if (mode === Mode.actions) {
core.exportVariable(name, value);
@@ -489,6 +526,9 @@ function getRequiredEnvParam(paramName) {
return value;
}
exports.getRequiredEnvParam = getRequiredEnvParam;
function getOptionalEnvParam(paramName) {
return process.env[paramName] || "";
}
class HTTPError extends Error {
constructor(message, status) {
super(message);
@@ -657,9 +697,21 @@ exports.isInTestMode = isInTestMode;
* @returns true if the action should generate a conde-scanning config file
* that gets passed to the CLI.
*/
async function useCodeScanningConfigInCli(codeql) {
return (process.env[EnvVar.CODEQL_PASS_CONFIG_TO_CLI] === "true" &&
(await codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_CONFIG_FILES)));
async function useCodeScanningConfigInCli(codeql, featureFlags) {
const envVarIsEnabled = getOptionalEnvParam(EnvVar.CODEQL_PASS_CONFIG_TO_CLI);
// If the user has explicitly turned off the feature, then don't use it.
if (envVarIsEnabled.toLocaleLowerCase() === "false") {
return false;
}
// If the user has explicitly turned on the feature, then use it.
// Or if the feature flag is enabled, then use it.
const isEnabled = envVarIsEnabled.toLocaleLowerCase() === "true" ||
(await featureFlags.getValue(feature_flags_1.FeatureFlag.CliConfigFileEnabled));
if (!isEnabled) {
return false;
}
// If the CLI version is too old, then don't use it.
return await codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_CONFIG_FILES);
}
exports.useCodeScanningConfigInCli = useCodeScanningConfigInCli;
/*
@@ -700,4 +752,22 @@ async function isGoExtractionReconciliationEnabled(featureFlags) {
(await featureFlags.getValue(feature_flags_1.FeatureFlag.GolangExtractionReconciliationEnabled)));
}
exports.isGoExtractionReconciliationEnabled = isGoExtractionReconciliationEnabled;
/**
* Get the size a folder in bytes. This will log any filesystem errors
* as a warning and then return undefined.
*
* @param cacheDir A directory to get the size of.
* @param logger A logger to log any errors to.
* @returns The size in bytes of the folder, or undefined if errors occurred.
*/
async function tryGetFolderBytes(cacheDir, logger) {
try {
return await (0, util_1.promisify)(get_folder_size_1.default)(cacheDir);
}
catch (e) {
logger.warning(`Encountered an error while getting size of folder: ${e}`);
return undefined;
}
}
exports.tryGetFolderBytes = tryGetFolderBytes;
//# sourceMappingURL=util.js.map
File diff suppressed because one or more lines are too long
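To make the intent of the new `getTotalMemoryBytes` fallback easier to follow, here is a minimal standalone sketch of the same idea: take the lowest of what Node, the cgroup limit file, and `/proc/meminfo` report, and keep Node's figure when a probe is unavailable. The helper name is illustrative and not part of the diff.

```typescript
import * as fs from "fs";
import * as os from "os";

// Sketch: pick the most conservative (smallest) total-memory figure available.
// Each probe is optional; a failed read simply leaves the Node-reported value in place.
function sketchTotalMemoryBytes(): number {
  const candidates = [os.totalmem()];
  try {
    // Containers often expose a lower limit than the host total.
    candidates.push(
      parseInt(fs.readFileSync("/sys/fs/cgroup/memory/memory.limit_in_bytes", "utf8"))
    );
  } catch {
    // Not running under cgroup v1, or file unreadable: ignore.
  }
  try {
    const match = /^\s*MemTotal:\s*(\d+)\s*kB\s*$/m.exec(
      fs.readFileSync("/proc/meminfo", "utf8")
    );
    if (match) candidates.push(parseInt(match[1]) * 1024);
  } catch {
    // Not on Linux: ignore.
  }
  return Math.min(...candidates.filter((n) => Number.isFinite(n)));
}
```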
35 lib/util.test.js (generated)
@@ -31,6 +31,7 @@ const github = __importStar(require("@actions/github"));
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const api = __importStar(require("./api-client"));
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
@@ -361,4 +362,38 @@ for (const [version, githubVersion, shouldReportWarning,] of CHECK_ACTION_VERSIO
]);
});
});
(0, ava_1.default)("useCodeScanningConfigInCli with no env var", async (t) => {
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.0"), (0, feature_flags_1.createFeatureFlags)([]))));
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.1"), (0, feature_flags_1.createFeatureFlags)([]))));
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.0"), (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.CliConfigFileEnabled]))));
// Yay! It works!
t.assert(await util.useCodeScanningConfigInCli(mockVersion("2.10.1"), (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.CliConfigFileEnabled])));
});
for (const val of ["TRUE", "true", "True"]) {
(0, ava_1.default)(`useCodeScanningConfigInCli with env var ${val}`, async (t) => {
process.env[util.EnvVar.CODEQL_PASS_CONFIG_TO_CLI] = val;
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.0"), (0, feature_flags_1.createFeatureFlags)([]))));
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.0"), (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.CliConfigFileEnabled]))));
// Yay! It works!
t.assert(await util.useCodeScanningConfigInCli(mockVersion("2.10.1"), (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.CliConfigFileEnabled])));
t.assert(await util.useCodeScanningConfigInCli(mockVersion("2.10.1"), (0, feature_flags_1.createFeatureFlags)([])));
});
}
for (const val of ["FALSE", "false", "False"]) {
(0, ava_1.default)(`useCodeScanningConfigInCli with env var ${val}`, async (t) => {
// Never turned on when env var is false
process.env[util.EnvVar.CODEQL_PASS_CONFIG_TO_CLI] = val;
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.0"), (0, feature_flags_1.createFeatureFlags)([]))));
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.0"), (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.CliConfigFileEnabled]))));
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.1"), (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.CliConfigFileEnabled]))));
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.1"), (0, feature_flags_1.createFeatureFlags)([]))));
});
}
function mockVersion(version) {
return {
async getVersion() {
return version;
},
};
}
//# sourceMappingURL=util.test.js.map
File diff suppressed because one or more lines are too long
2 node_modules/.package-lock.json (generated, vendored)
@@ -1,6 +1,6 @@
{
"name": "codeql",
"version": "2.1.24",
"version": "2.1.26",
"lockfileVersion": 2,
"requires": true,
"packages": {
4 package-lock.json (generated)
@@ -1,12 +1,12 @@
{
"name": "codeql",
"version": "2.1.24",
"version": "2.1.26",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "codeql",
"version": "2.1.24",
"version": "2.1.26",
"license": "MIT",
"dependencies": {
"@actions/artifact": "^1.0.0",

@@ -1,6 +1,6 @@
{
"name": "codeql",
"version": "2.1.24",
"version": "2.1.26",
"private": true,
"description": "CodeQL action",
"scripts": {
@@ -1,5 +1,7 @@
name: "Go: Custom queries"
description: "Checks that Go works in conjunction with a config file specifying custom queries"
env:
DOTNET_GENERATE_ASPNET_CERTIFICATE: "false"
steps:
- uses: actions/setup-go@v3
with:

@@ -1,49 +0,0 @@
name: "Test unsetting environment variables"
description: "An end-to-end integration test that unsets some environment variables"
os: ["ubuntu-latest"]
steps:
- uses: ./../action/init
with:
db-location: "${{ runner.temp }}/customDbLocation"
tools: ${{ steps.prepare-test.outputs.tools-url }}
env:
TEST_MODE: true
- name: Build code
shell: bash
run: env -i PATH="$PATH" HOME="$HOME" ./build.sh
- uses: ./../action/analyze
id: analysis
env:
TEST_MODE: true
- shell: bash
run: |
CPP_DB=${{ fromJson(steps.analysis.outputs.db-locations).cpp }}
if [[ ! -d $CPP_DB ]] || [[ ! $CPP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for CPP, or created it in the wrong location."
exit 1
fi
CSHARP_DB=${{ fromJson(steps.analysis.outputs.db-locations).csharp }}
if [[ ! -d $CSHARP_DB ]] || [[ ! $CSHARP_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for C Sharp, or created it in the wrong location."
exit 1
fi
GO_DB=${{ fromJson(steps.analysis.outputs.db-locations).go }}
if [[ ! -d $GO_DB ]] || [[ ! $GO_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Go, or created it in the wrong location."
exit 1
fi
JAVA_DB=${{ fromJson(steps.analysis.outputs.db-locations).java }}
if [[ ! -d $JAVA_DB ]] || [[ ! $JAVA_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Java, or created it in the wrong location."
exit 1
fi
JAVASCRIPT_DB=${{ fromJson(steps.analysis.outputs.db-locations).javascript }}
if [[ ! -d $JAVASCRIPT_DB ]] || [[ ! $JAVASCRIPT_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Javascript, or created it in the wrong location."
exit 1
fi
PYTHON_DB=${{ fromJson(steps.analysis.outputs.db-locations).python }}
if [[ ! -d $PYTHON_DB ]] || [[ ! $PYTHON_DB == ${{ runner.temp }}/customDbLocation/* ]]; then
echo "Did not create a database for Python, or created it in the wrong location."
exit 1
fi
@@ -5,31 +5,46 @@ import os
import subprocess
from tempfile import mkdtemp
from typing import Optional
import shutil

import extractor_version

def _check_call(command):
def _check_call(command, extra_env={}):
print('+ {}'.format(' '.join(command)), flush=True)
subprocess.check_call(command, stdin=subprocess.DEVNULL)

env = os.environ.copy()
env.update(extra_env)
subprocess.check_call(command, stdin=subprocess.DEVNULL, env=env)
sys.stdout.flush()
sys.stderr.flush()

def _check_output(command):
def _check_output(command, extra_env={}):
print('+ {}'.format(' '.join(command)), flush=True)
out = subprocess.check_output(command, stdin=subprocess.DEVNULL)

env = os.environ.copy()
env.update(extra_env)
out = subprocess.check_output(command, stdin=subprocess.DEVNULL, env=env)
print(out, flush=True)
sys.stderr.flush()
return out

def install_packages_with_poetry():

# To handle poetry 1.2, which started to use keyring interaction MUCH more, we need
# add a workaround. See
# https://github.com/python-poetry/poetry/issues/2692#issuecomment-1235683370
extra_poetry_env = {"PYTHON_KEYRING_BACKEND": "keyring.backends.null.Keyring"}

command = [sys.executable, '-m', 'poetry']
if sys.platform.startswith('win32'):
# In windows the default path were the deps are installed gets wiped out between steps,
# so we have to set it up to a folder that will be kept
os.environ['POETRY_VIRTUALENVS_PATH'] = os.path.join(os.environ['RUNNER_WORKSPACE'], 'virtualenvs')
try:
_check_call(command + ['install', '--no-root'])
_check_call(command + ['install', '--no-root'], extra_env=extra_poetry_env)
except subprocess.CalledProcessError:
sys.exit('package installation with poetry failed, see error above')

@@ -38,7 +53,7 @@ def install_packages_with_poetry():
# virtualenv for the package, which was the case for using poetry for Python 2 when
# default system interpreter was Python 3 :/

poetry_out = _check_output(command + ['run', 'which', 'python'])
poetry_out = _check_output(command + ['run', 'which', 'python'], extra_env=extra_poetry_env)
python_executable_path = poetry_out.decode('utf-8').splitlines()[-1]

if sys.platform.startswith('win32'):
@@ -153,6 +168,19 @@ def install_packages(codeql_base_dir) -> Optional[str]:

# get_extractor_version returns the Python version the extractor thinks this repo is using
version = extractor_version.get_extractor_version(codeql_base_dir, quiet=False)
sys.stdout.flush()
sys.stderr.flush()

if version == 2 and not sys.platform.startswith('win32'):
# On Ubuntu 22.04 'python2' is not available by default. We want to give a slightly better
# error message than a traceback + `No such file or directory: 'python2'`
if shutil.which("python2") is None:
sys.exit(
"Python package installation failed: we detected this code as Python 2, but the 'python2' executable was not available. "
"To enable automatic package installation, please install 'python2' before the 'github/codeql-action/init' step, "
"for example by running 'sudo apt install python2' (Ubuntu 22.04). "
"If your code is not Python 2, but actually Python 3, please file a bug report at https://github.com/github/codeql-action/issues/new"
)

if os.path.exists('requirements.txt'):
print('Found requirements.txt, will install packages with pip', flush=True)
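The Python change above threads an `extra_env` mapping into each subprocess call so that `PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring` is applied only to the poetry invocations. For comparison, a minimal sketch of the same pattern in TypeScript; the helper name and usage are illustrative, not part of the action.

```typescript
import { execFileSync } from "child_process";

// Sketch: run a command with a few extra environment variables layered on top
// of the current environment, mirroring the extra_env handling added above.
function checkCall(
  command: string,
  args: string[],
  extraEnv: Record<string, string> = {}
): void {
  execFileSync(command, args, {
    stdio: "inherit",
    env: { ...process.env, ...extraEnv },
  });
}

// e.g. checkCall("poetry", ["install", "--no-root"],
//                { PYTHON_KEYRING_BACKEND: "keyring.backends.null.Keyring" });
```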
@@ -1,13 +1,16 @@
#! /usr/bin/pwsh

py -2 -m pip install --user --upgrade pip setuptools wheel
py -3 -m pip install --user --upgrade pip setuptools wheel
# while waiting for the next release of `virtualenv` after v20.16.5, we install an older
# version of `setuptools` to ensure that binaries are always put under
# `<venv-path>/bin`, which wouldn't always happen with the GitHub actions version of
# Ubuntu 22.04. See https://github.com/github/codeql-action/issues/1249
py -2 -m pip install --user --upgrade pip 'setuptools<60' wheel
py -3 -m pip install --user --upgrade pip 'setuptools<60' wheel

# virtualenv is a bit nicer for setting up virtual environment, since it will provide up-to-date versions of
# pip/setuptools/wheel which basic `python3 -m venv venv` won't
py -2 -m pip install --user 'virtualenv<20.11'
py -3 -m pip install --user 'virtualenv<20.11'
py -2 -m pip install --user 'virtualenv!=20.12.0'
py -3 -m pip install --user virtualenv

# We aren't compatible with poetry 1.2
py -3 -m pip install --user "poetry>=1.1,<1.2"
py -3 -m pip install --user "poetry>=1.1"
py -3 -m pip install --user pipenv

@@ -11,11 +11,17 @@ set -e
export PATH="$HOME/.local/bin:$PATH"

# Setup Python 3 dependency installation tools.
python3 -m pip install --user --upgrade pip setuptools wheel

# we install an older version of `setuptools` to ensure that binaries are always put
# under `<venv-path>/bin`, which wouldn't always happen with the GitHub actions version
# of Ubuntu 22.04. See https://github.com/github/codeql-action/issues/1249. The the next
# release of `virtualenv` after v20.16.5 will include a fix for this, so we can remove
# this bit of the logic again.
python3 -m pip install --user --upgrade pip 'setuptools<60' wheel

# virtualenv is a bit nicer for setting up virtual environment, since it will provide up-to-date versions of
# pip/setuptools/wheel which basic `python3 -m venv venv` won't
python3 -m pip install --user 'virtualenv<20.11'
python3 -m pip install --user virtualenv

# We install poetry with pip instead of the recommended way, since the recommended way
# caused some problem since `poetry run` gives output like:
@@ -24,8 +30,7 @@ python3 -m pip install --user 'virtualenv<20.11'
# "program uses threads.", RuntimeWarning)
# LGTM_PYTHON_SETUP_VERSION=The currently activated Python version 2.7.18 is not supported by the project (^3.5). Trying to find and use a compatible version. Using python3 (3.8.2) 3

# We aren't compatible with poetry 1.2
python3 -m pip install --user "poetry>=1.1,<1.2"
python3 -m pip install --user "poetry>=1.1"
python3 -m pip install --user pipenv

if command -v python2 >/dev/null 2>&1; then
@@ -40,7 +45,7 @@ if command -v python2 >/dev/null 2>&1; then
curl --location --fail https://bootstrap.pypa.io/pip/2.7/get-pip.py | python2
fi

python2 -m pip install --user --upgrade pip setuptools wheel
python2 -m pip install --user --upgrade pip 'setuptools<60' wheel

python2 -m pip install --user 'virtualenv<20.11'
python2 -m pip install --user 'virtualenv!=20.12.0'
fi
@@ -545,7 +545,7 @@ export async function getRef(): Promise<string> {
// in actions/checkout@v1 this may not be true as it checks out the repository
// using GITHUB_REF. There is a subtle race condition where
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
// git git-parse GITHUB_REF == git rev-parse HEAD instead.
// git rev-parse GITHUB_REF == git rev-parse HEAD instead.
const hasChangedRef =
sha !== head &&
(await getCommitOid(
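The hunk above only corrects a typo in the comment ("git git-parse" to "git rev-parse"); the check it describes is comparing what the ref resolves to against what HEAD resolves to, rather than trusting GITHUB_SHA. A hedged sketch of just that comparison, with `revParse` standing in for a `git rev-parse` wrapper (not the action's own helper):

```typescript
// Sketch of the comparison the corrected comment describes: instead of assuming
// GITHUB_SHA matches the checkout, resolve both the ref and HEAD and compare.
async function refMatchesHead(
  revParse: (rev: string) => Promise<string>,
  githubRef: string
): Promise<boolean> {
  return (await revParse(githubRef)) === (await revParse("HEAD"));
}
```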
@@ -36,6 +36,7 @@ test("analyze action with RAM & threads from environment variables", async (t) =
gitHubVersion,
languages: [],
packs: [],
trapCaches: {},
} as unknown as configUtils.Config);
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("token").returns("fake-token");

@@ -36,6 +36,7 @@ test("analyze action with RAM & threads from action inputs", async (t) => {
gitHubVersion,
languages: [],
packs: [],
trapCaches: {},
} as unknown as configUtils.Config);
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("token").returns("fake-token");

@@ -245,6 +245,7 @@ async function run() {
logger,
featureFlags
);

if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
runStats = await runQueries(
outputDir,
@@ -253,7 +254,8 @@ async function run() {
threads,
actionsUtil.getOptionalInput("category"),
config,
logger
logger,
featureFlags
);
}

@@ -14,6 +14,7 @@ import {
import { setCodeQL } from "./codeql";
import { Config } from "./config-utils";
import * as count from "./count-loc";
import { createFeatureFlags } from "./feature-flags";
import { Language } from "./languages";
import { getRunnerLogger } from "./logging";
import { setupTests, setupActionsVars } from "./testing-utils";
@@ -138,7 +139,8 @@ test("status report fields and search path setting", async (t) => {
threadsFlag,
undefined,
config,
getRunnerLogger(true)
getRunnerLogger(true),
createFeatureFlags([])
);
const hasPacks = language in packs;
const statusReportKeys = Object.keys(builtinStatusReport).sort();
@@ -187,7 +189,8 @@ test("status report fields and search path setting", async (t) => {
threadsFlag,
undefined,
config,
getRunnerLogger(true)
getRunnerLogger(true),
createFeatureFlags([])
);
t.deepEqual(Object.keys(customStatusReport).length, 2);
t.true(
@@ -213,7 +213,8 @@ export async function runQueries(
threadsFlag: string,
automationDetailsId: string | undefined,
config: configUtils.Config,
logger: Logger
logger: Logger,
featureFlags: FeatureFlags
): Promise<QueriesStatusReport> {
const statusReport: QueriesStatusReport = {};

@@ -256,7 +257,7 @@ export async function runQueries(
}

try {
if (await util.useCodeScanningConfigInCli(codeql)) {
if (await util.useCodeScanningConfigInCli(codeql, featureFlags)) {
// If we are using the codescanning config in the CLI,
// much of the work needed to generate the query suites
// is done in the CLI. We just need to make a single

@@ -1 +1 @@
{"maximumVersion": "3.7", "minimumVersion": "3.2"}
{"maximumVersion": "3.7", "minimumVersion": "3.3"}
@@ -819,7 +819,11 @@ async function getCodeQLForCmd(
}
}

const configLocation = await generateCodescanningConfig(codeql, config);
const configLocation = await generateCodescanningConfig(
codeql,
config,
featureFlags
);
if (configLocation) {
extraArgs.push(`--codescanning-config=${configLocation}`);
}
@@ -1269,9 +1273,10 @@ async function runTool(cmd: string, args: string[] = []) {
*/
async function generateCodescanningConfig(
codeql: CodeQL,
config: Config
config: Config,
featureFlags: FeatureFlags
): Promise<string | undefined> {
if (!(await util.useCodeScanningConfigInCli(codeql))) {
if (!(await util.useCodeScanningConfigInCli(codeql, featureFlags))) {
return;
}
const configLocation = path.resolve(config.tempDir, "user-config.yaml");

@@ -1704,7 +1704,7 @@ export async function initConfig(
// When using the codescanning config in the CLI, pack downloads
// happen in the CLI during the `database init` command, so no need
// to download them here.
if (!(await useCodeScanningConfigInCli(codeQL))) {
if (!(await useCodeScanningConfigInCli(codeQL, featureFlags))) {
const registries = parseRegistries(registriesInput);
await downloadPacks(
codeQL,

@@ -1,3 +1,3 @@
{
"bundleVersion": "codeql-bundle-20220908"
"bundleVersion": "codeql-bundle-20220923"
}

@@ -12,6 +12,7 @@ export enum FeatureFlag {
MlPoweredQueriesEnabled = "ml_powered_queries_enabled",
TrapCachingEnabled = "trap_caching_enabled",
GolangExtractionReconciliationEnabled = "golang_extraction_reconciliation_enabled",
CliConfigFileEnabled = "cli_config_file_enabled",
}

/**

@@ -517,7 +517,8 @@ program
threads,
cmd.category,
config,
logger
logger,
createFeatureFlags([])
);

if (!cmd.upload) {
@@ -165,6 +165,7 @@ test("upload cache key contains right fields", async (t) => {
const loggedMessages = [];
const logger = getRecordingLogger(loggedMessages);
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
sinon.stub(util, "tryGetFolderBytes").resolves(999_999_999);
const stubSave = sinon.stub(cache, "saveCache");
process.env.GITHUB_SHA = "somesha";
await uploadTrapCaches(stubCodeql, testConfigWithoutTmpDir, logger);

@@ -1,16 +1,14 @@
import * as fs from "fs";
import * as path from "path";
import { promisify } from "util";

import * as cache from "@actions/cache";
import getFolderSize from "get-folder-size";

import * as actionsUtil from "./actions-util";
import { CodeQL, CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES } from "./codeql";
import { Config } from "./config-utils";
import { Language } from "./languages";
import { Logger } from "./logging";
import { codeQlVersionAbove } from "./util";
import { codeQlVersionAbove, tryGetFolderBytes } from "./util";

// This constant should be bumped if we make a breaking change
// to how the CodeQL Action stores or retrieves the TRAP cache,
@@ -22,6 +20,10 @@ const CACHE_VERSION = 1;
// This constant sets the size of each TRAP cache in megabytes.
const CACHE_SIZE_MB = 1024;

// This constant sets the minimum size in megabytes of a TRAP
// cache for us to consider it worth uploading.
const MINIMUM_CACHE_MB_TO_UPLOAD = 10;

export async function getTrapCachingExtractorConfigArgs(
config: Config
): Promise<string[]> {
@@ -138,6 +140,19 @@ export async function uploadTrapCaches(
for (const language of config.languages) {
const cacheDir = config.trapCaches[language];
if (cacheDir === undefined) continue;
const trapFolderSize = await tryGetFolderBytes(cacheDir, logger);
if (trapFolderSize === undefined) {
logger.info(
`Skipping upload of TRAP cache for ${language} as we couldn't determine its size`
);
continue;
}
if (trapFolderSize < MINIMUM_CACHE_MB_TO_UPLOAD * 1_048_576) {
logger.info(
`Skipping upload of TRAP cache for ${language} as it is too small`
);
continue;
}
const key = await cacheKey(
codeql,
language,
@@ -201,17 +216,12 @@ export async function getTotalCacheSize(
trapCaches: Partial<Record<Language, string>>,
logger: Logger
): Promise<number> {
try {
const sizes = await Promise.all(
Object.values(trapCaches).map(async (cacheDir) => {
return promisify<string, number>(getFolderSize)(cacheDir);
})
);
return sizes.reduce((a, b) => a + b, 0);
} catch (e) {
logger.warning(`Encountered an error while getting TRAP cache size: ${e}`);
return 0;
}
const sizes = await Promise.all(
Object.values(trapCaches).map((cacheDir) =>
tryGetFolderBytes(cacheDir, logger)
)
);
return sizes.map((a) => a || 0).reduce((a, b) => a + b, 0);
}

async function cacheKey(
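The rewritten `getTotalCacheSize` above no longer lets a single unreadable directory zero out the whole report: a folder whose size cannot be determined simply contributes nothing to the sum. A minimal sketch of that summation, with `sizes` standing in for the resolved `tryGetFolderBytes` results:

```typescript
// Sketch: sum folder sizes, counting unknown (undefined) sizes as 0 bytes.
function sumCacheSizes(sizes: Array<number | undefined>): number {
  return sizes.map((size) => size ?? 0).reduce((a, b) => a + b, 0);
}

// e.g. sumCacheSizes([1_048_576, undefined, 2_097_152]) === 3_145_728
```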
109 src/util.test.ts
@@ -9,7 +9,9 @@ import test, { ExecutionContext } from "ava";
import * as sinon from "sinon";

import * as api from "./api-client";
import { CodeQL } from "./codeql";
import { Config } from "./config-utils";
import { createFeatureFlags, FeatureFlag } from "./feature-flags";
import { getRunnerLogger, Logger } from "./logging";
import { setupTests } from "./testing-utils";
import * as util from "./util";
@@ -492,3 +494,110 @@ test("listFolder", async (t) => {
]);
});
});

test("useCodeScanningConfigInCli with no env var", async (t) => {
t.assert(
!(await util.useCodeScanningConfigInCli(
mockVersion("2.10.0"),
createFeatureFlags([])
))
);

t.assert(
!(await util.useCodeScanningConfigInCli(
mockVersion("2.10.1"),
createFeatureFlags([])
))
);

t.assert(
!(await util.useCodeScanningConfigInCli(
mockVersion("2.10.0"),
createFeatureFlags([FeatureFlag.CliConfigFileEnabled])
))
);

// Yay! It works!
t.assert(
await util.useCodeScanningConfigInCli(
mockVersion("2.10.1"),
createFeatureFlags([FeatureFlag.CliConfigFileEnabled])
)
);
});

for (const val of ["TRUE", "true", "True"]) {
test(`useCodeScanningConfigInCli with env var ${val}`, async (t) => {
process.env[util.EnvVar.CODEQL_PASS_CONFIG_TO_CLI] = val;
t.assert(
!(await util.useCodeScanningConfigInCli(
mockVersion("2.10.0"),
createFeatureFlags([])
))
);

t.assert(
!(await util.useCodeScanningConfigInCli(
mockVersion("2.10.0"),
createFeatureFlags([FeatureFlag.CliConfigFileEnabled])
))
);

// Yay! It works!
t.assert(
await util.useCodeScanningConfigInCli(
mockVersion("2.10.1"),
createFeatureFlags([FeatureFlag.CliConfigFileEnabled])
)
);

t.assert(
await util.useCodeScanningConfigInCli(
mockVersion("2.10.1"),
createFeatureFlags([])
)
);
});
}

for (const val of ["FALSE", "false", "False"]) {
test(`useCodeScanningConfigInCli with env var ${val}`, async (t) => {
// Never turned on when env var is false
process.env[util.EnvVar.CODEQL_PASS_CONFIG_TO_CLI] = val;
t.assert(
!(await util.useCodeScanningConfigInCli(
mockVersion("2.10.0"),
createFeatureFlags([])
))
);

t.assert(
!(await util.useCodeScanningConfigInCli(
mockVersion("2.10.0"),
createFeatureFlags([FeatureFlag.CliConfigFileEnabled])
))
);

t.assert(
!(await util.useCodeScanningConfigInCli(
mockVersion("2.10.1"),
createFeatureFlags([FeatureFlag.CliConfigFileEnabled])
))
);

t.assert(
!(await util.useCodeScanningConfigInCli(
mockVersion("2.10.1"),
createFeatureFlags([])
))
);
});
}

function mockVersion(version) {
return {
async getVersion() {
return version;
},
} as CodeQL;
}
93 src/util.ts
@@ -2,9 +2,11 @@ import * as fs from "fs";
import * as os from "os";
import * as path from "path";
import { Readable } from "stream";
import { promisify } from "util";

import * as core from "@actions/core";
import del from "del";
import getFolderSize from "get-folder-size";
import * as semver from "semver";

import * as api from "./api-client";
@@ -160,7 +162,7 @@ export function getMemoryFlagValue(userInput: string | undefined): number {
throw new Error(`Invalid RAM setting "${userInput}", specified.`);
}
} else {
const totalMemoryBytes = os.totalmem();
const totalMemoryBytes = getTotalMemoryBytes();
const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
const reservedMemoryMegaBytes = getSystemReservedMemoryMegaBytes();
memoryToUseMegaBytes = totalMemoryMegaBytes - reservedMemoryMegaBytes;
@@ -168,6 +170,43 @@ export function getMemoryFlagValue(userInput: string | undefined): number {
return Math.floor(memoryToUseMegaBytes);
}

export function getTotalMemoryBytes(): number {
const nodeReportedMemory = os.totalmem();
console.log(`Node reported ${nodeReportedMemory} bytes of memory.`);
if (process.platform === "win32") {
console.log("On Windows, so just returning the memory Node reported.");
return nodeReportedMemory;
}
let lowestMemorySeen = nodeReportedMemory;
try {
const dockerMemoryLimit = parseInt(
fs.readFileSync("/sys/fs/cgroup/memory/memory.limit_in_bytes", "utf8")
);
console.log(`Docker set a limit of ${dockerMemoryLimit} bytes of memory.`);
lowestMemorySeen = Math.min(lowestMemorySeen, dockerMemoryLimit);
} catch (err) {
console.error(err);
}
try {
const memoryInfo = fs.readFileSync("/proc/meminfo", "utf8").split("\n");
const relevantLine = /^\s*MemTotal:\s*(\d+)\s*kB\s*$/;
for (const line of memoryInfo) {
const match = relevantLine.exec(line);
if (match) {
const memoryFromMemoryInfo = parseInt(match[1]) * 1024;
console.log(
`Found total memory of ${memoryFromMemoryInfo} in memory info.`
);
lowestMemorySeen = Math.min(lowestMemorySeen, memoryFromMemoryInfo);
break;
}
}
} catch (err) {
console.error(err);
}
return lowestMemorySeen;
}

/**
* Get the codeql `--ram` flag as configured by the `ram` input. If no value was
* specified, the total available memory will be used minus a threshold
@@ -487,7 +526,7 @@ export enum Mode {
* CLI. These environment variables are relevant for both the runner
* and the action.
*/
enum EnvVar {
export enum EnvVar {
/**
* The mode of the codeql-action, either 'actions' or 'runner'.
*/
@@ -591,6 +630,10 @@ export function getRequiredEnvParam(paramName: string): string {
return value;
}

function getOptionalEnvParam(paramName: string): string {
return process.env[paramName] || "";
}

export class HTTPError extends Error {
public status: number;

@@ -787,12 +830,28 @@ export function isInTestMode(): boolean {
* that gets passed to the CLI.
*/
export async function useCodeScanningConfigInCli(
codeql: CodeQL
codeql: CodeQL,
featureFlags: FeatureFlags
): Promise<boolean> {
return (
process.env[EnvVar.CODEQL_PASS_CONFIG_TO_CLI] === "true" &&
(await codeQlVersionAbove(codeql, CODEQL_VERSION_CONFIG_FILES))
);
const envVarIsEnabled = getOptionalEnvParam(EnvVar.CODEQL_PASS_CONFIG_TO_CLI);

// If the user has explicitly turned off the feature, then don't use it.
if (envVarIsEnabled.toLocaleLowerCase() === "false") {
return false;
}

// If the user has explicitly turned on the feature, then use it.
// Or if the feature flag is enabled, then use it.
const isEnabled =
envVarIsEnabled.toLocaleLowerCase() === "true" ||
(await featureFlags.getValue(FeatureFlag.CliConfigFileEnabled));

if (!isEnabled) {
return false;
}

// If the CLI version is too old, then don't use it.
return await codeQlVersionAbove(codeql, CODEQL_VERSION_CONFIG_FILES);
}

/*
@@ -836,3 +895,23 @@ export async function isGoExtractionReconciliationEnabled(
))
);
}

/**
* Get the size a folder in bytes. This will log any filesystem errors
* as a warning and then return undefined.
*
* @param cacheDir A directory to get the size of.
* @param logger A logger to log any errors to.
* @returns The size in bytes of the folder, or undefined if errors occurred.
*/
export async function tryGetFolderBytes(
cacheDir: string,
logger: Logger
): Promise<number | undefined> {
try {
return await promisify<string, number>(getFolderSize)(cacheDir);
} catch (e) {
logger.warning(`Encountered an error while getting size of folder: ${e}`);
return undefined;
}
}
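The new `useCodeScanningConfigInCli` resolves three inputs in a fixed order: an explicit `CODEQL_PASS_CONFIG_TO_CLI=false` always wins, then either `CODEQL_PASS_CONFIG_TO_CLI=true` or the `cli_config_file_enabled` feature flag can opt in, and finally the CLI must be new enough to accept a config file (2.10.1 in the tests above). A condensed sketch of that precedence, with plain booleans standing in for the env var, feature flag, and version check:

```typescript
// Sketch: precedence of the three gates checked by useCodeScanningConfigInCli.
function passConfigToCli(
  envVar: "true" | "false" | "",    // CODEQL_PASS_CONFIG_TO_CLI, lower-cased
  flagEnabled: boolean,             // cli_config_file_enabled feature flag
  cliSupportsConfigFiles: boolean   // CodeQL CLI >= CODEQL_VERSION_CONFIG_FILES
): boolean {
  if (envVar === "false") return false;                 // explicit opt-out wins
  if (envVar !== "true" && !flagEnabled) return false;  // nothing opted in
  return cliSupportsConfigFiles;                        // the CLI must support it
}
```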