Compare commits


1 Commit

Author SHA1 Message Date
Marco Gario  d5eefcf88f  Util for optional/required inputs  2020-09-15 14:35:13 +02:00
802 changed files with 154970 additions and 95367 deletions

View File

@@ -18,20 +18,15 @@
"import/no-amd": "error",
"import/no-commonjs": "error",
"import/no-dynamic-require": "error",
"import/no-extraneous-dependencies": ["error"],
"import/no-extraneous-dependencies": ["error", {"devDependencies": false}],
"import/no-namespace": "off",
"import/no-unresolved": "error",
"import/no-webpack-loader-syntax": "error",
"import/order": ["error", {
"alphabetize": {"order": "asc"},
"newlines-between": "always"
}],
"no-async-foreach/no-async-foreach": "error",
"no-console": "off",
"no-sequences": "error",
"no-shadow": "off",
"@typescript-eslint/no-shadow": ["error"],
"one-var": ["error", "never"]
"one-var": ["error", "never"],
"sort-imports": ["error", { "allowSeparatedGroups": true }]
},
"overrides": [{
// "temporarily downgraded during transition to eslint
@@ -44,11 +39,21 @@
"@typescript-eslint/no-unsafe-call": "off",
"@typescript-eslint/no-unsafe-member-access": "off",
"@typescript-eslint/no-unsafe-return": "off",
"@typescript-eslint/no-unused-vars": "off",
"@typescript-eslint/no-var-requires": "off",
"@typescript-eslint/prefer-regexp-exec": "off",
"@typescript-eslint/require-await": "off",
"@typescript-eslint/restrict-template-expressions": "off",
"func-style": "off"
"eslint-comments/no-use": "off",
"func-style": "off",
"github/array-foreach": "off",
"github/no-then": "off",
"import/no-extraneous-dependencies": "off",
"no-shadow": "off",
"no-sparse-arrays": "off",
"no-throw-literal": "off",
"no-useless-escape": "off",
"sort-imports": "off"
}
}]
}

View File

@@ -1,5 +1,5 @@
blank_issues_enabled: true
contact_links:
- name: Contact GitHub Support
url: https://support.github.com/request
about: Contact Support
url: https://support.github.com/contact?subject=Code+Scanning+Beta+Support&tags=code-scanning-support
about: Contact Support about code scanning

View File

@@ -35,7 +35,7 @@ def open_pr(repo, all_commits, short_main_sha, branch_name):
commits_without_pull_requests = []
for commit in all_commits:
pr = get_pr_for_commit(repo, commit)
if pr is None:
commits_without_pull_requests.append(commit)
elif not any(p for p in pull_requests if p.number == pr.number):
@@ -47,7 +47,7 @@ def open_pr(repo, all_commits, short_main_sha, branch_name):
# Sort PRs and commits by age
pull_requests = sorted(pull_requests, key=lambda pr: pr.number)
commits_without_pull_requests = sorted(commits_without_pull_requests, key=lambda c: c.commit.author.date)
# Start constructing the body text
body = 'Merging ' + short_main_sha + ' into ' + LATEST_RELEASE_BRANCH
@@ -62,7 +62,7 @@ def open_pr(repo, all_commits, short_main_sha, branch_name):
body += '\n- #' + str(pr.number)
body += ' - ' + pr.title
body += ' (@' + merger + ')'
# List all commits not part of a PR
if len(commits_without_pull_requests) > 0:
body += '\n\nContains the following commits not from a pull request:'
@@ -86,7 +86,7 @@ def get_conductor(repo, pull_requests, other_commits):
# If there are any PRs then use whoever merged the last one
if len(pull_requests) > 0:
return get_merger_of_pr(repo, pull_requests[-1])
# Otherwise take the author of the latest commit
return other_commits[-1].author.login
@@ -95,7 +95,7 @@ def get_conductor(repo, pull_requests, other_commits):
# This will not include any commits that exist on the release branch
# that aren't on main.
def get_commit_difference(repo):
commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '..' + MAIN_BRANCH).strip().split('\n')
commits = run_git('log', '--pretty=format:%H', ORIGIN + '/' + LATEST_RELEASE_BRANCH + '...' + MAIN_BRANCH).strip().split('\n')
# Convert to full-fledged commit objects
commits = [repo.get_commit(c) for c in commits]
@@ -119,12 +119,12 @@ def get_truncated_commit_message(commit):
# Returns the PR object, or None if no PR could be found.
def get_pr_for_commit(repo, commit):
prs = commit.get_pulls()
if prs.totalCount > 0:
# In the case that there are multiple PRs, return the earliest one
prs = list(prs)
sorted_prs = sorted(prs, key=lambda pr: int(pr.number))
return sorted_prs[0]
sorted(prs, key=lambda pr: int(pr.number))
return prs[0]
else:
return None
@@ -165,7 +165,7 @@ def main():
if branch_exists_on_remote(new_branch_name):
print('Branch ' + new_branch_name + ' already exists. Nothing to do.')
return
# Create the new branch and push it to the remote
print('Creating branch ' + new_branch_name)
run_git('checkout', '-b', new_branch_name, MAIN_BRANCH)
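# Illustration of the two git range syntaxes appearing in the get_commit_difference
# hunk above (a sketch reusing this script's own helper and constants; not part of
# the change itself):
#   'origin/v1..main'  -> commits reachable from main but not from the release branch,
#                         which is what the comment on get_commit_difference describes.
#   'origin/v1...main' -> the symmetric difference; it would also list commits that
#                         exist only on the release branch.
example_range = ORIGIN + '/' + LATEST_RELEASE_BRANCH + '..' + MAIN_BRANCH
example_shas = run_git('log', '--pretty=format:%H', example_range).strip().split('\n')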

View File

@@ -1,22 +0,0 @@
name: Check Expected Release Files
on:
pull_request:
paths:
- .github/workflows/check-expected-release-files.yml
- src/defaults.json
jobs:
check-expected-release-files:
runs-on: ubuntu-latest
steps:
- name: Checkout CodeQL Action
uses: actions/checkout@v2
- name: Check Expected Release Files
run: |
bundle_version="$(cat "./src/defaults.json" | jq -r ".bundleVersion")"
set -x
for expected_file in "codeql-bundle.tar.gz" "codeql-bundle-linux64.tar.gz" "codeql-bundle-osx64.tar.gz" "codeql-bundle-win64.tar.gz" "codeql-runner-linux" "codeql-runner-macos" "codeql-runner-win.exe"; do
curl --location --fail --head --request GET "https://github.com/github/codeql-action/releases/download/$bundle_version/$expected_file" > /dev/null
done

View File

@@ -1,10 +1,6 @@
name: "CodeQL action"
on:
push:
branches: [main, v1]
pull_request:
branches: [main, v1]
on: [push, pull_request]
jobs:
build:
@@ -15,12 +11,18 @@ jobs:
steps:
- uses: actions/checkout@v2
with:
# Must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head of the pull request.
fetch-depth: 2
# If this run was triggered by a pull request event then checkout
# the head of the pull request instead of the merge commit.
- run: git checkout HEAD^2
if: ${{ github.event_name == 'pull_request' }}
- uses: ./init
id: init
with:
languages: javascript
config-file: ./.github/codeql/codeql-config.yml
# confirm steps.init.outputs.codeql-path points to the codeql binary
- name: Print CodeQL Version
run: ${{steps.init.outputs.codeql-path}} version --format=json
- uses: ./analyze

View File

@@ -0,0 +1,446 @@
name: "Integration Testing"
on: [push, pull_request]
jobs:
multi-language-repo_test-autodetect-languages:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- uses: ./../action/init
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
TEST_MODE: true
- run: |
cd "$RUNNER_TEMP/codeql_databases"
# List all directories as there will be precisely one directory per database
# but there may be other files in this directory such as query suites.
if [ "$(ls -d */ | wc -l)" != 6 ] || \
[[ ! -d cpp ]] || \
[[ ! -d csharp ]] || \
[[ ! -d go ]] || \
[[ ! -d java ]] || \
[[ ! -d javascript ]] || \
[[ ! -d python ]]; then
echo "Did not find expected number of databases. Database dir contains: $(ls)"
exit 1
fi
multi-language-repo_test-custom-queries-and-remote-config:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- uses: ./../action/init
with:
languages: cpp,csharp,java,javascript,python
config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
TEST_MODE: true
# Currently is not possible to analyze Go in conjunction with other languages in macos
multi-language-repo_test-go-custom-queries:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/setup-go@v2
if: ${{ matrix.os == 'macos-latest' }}
with:
go-version: '^1.13.1'
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- uses: ./../action/init
with:
languages: go
config-file: ./.github/codeql/custom-queries.yml
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
TEST_MODE: true
multi-language-repo_rubocop:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Set up Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: 2.6
- name: Install Code Scanning integration
run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
- name: Install dependencies
run: bundle install
- name: Rubocop run
run: |
bash -c "
bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
[[ $? -ne 2 ]]
"
- uses: ./../action/upload-sarif
with:
sarif_file: rubocop.sarif
env:
TEST_MODE: true
test-proxy:
runs-on: ubuntu-latest
container:
image: ubuntu:18.04
options: --dns 127.0.0.1
services:
squid-proxy:
image: datadog/squid:latest
ports:
- 3128:3128
env:
https_proxy: http://squid-proxy:3128
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- uses: ./../action/init
with:
languages: javascript
- uses: ./../action/analyze
env:
TEST_MODE: true
runner-analyze-javascript-ubuntu:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
# Pass --config-file here, but not for other jobs in this workflow.
# This means we're testing the config file parsing in the runner
# but not slowing down all jobs unnecessarily as it doesn't add much
# testing the parsing on different operating systems and languages.
runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages javascript --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-ubuntu:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
. ./codeql-runner/codeql-env.sh
dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
cat ./codeql-runner/codeql-env.sh | Invoke-Expression
dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
. ./codeql-runner/codeql-env.sh
dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-ubuntu:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
../action/runner/dist/codeql-runner-linux autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
../action/runner/dist/codeql-runner-win.exe autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
../action/runner/dist/codeql-runner-macos autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-upload-sarif:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Upload with runner
run: |
# Deliberately don't use TEST_MODE here. This is specifically testing
# the compatibility with the API.
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}

View File

@@ -1,12 +1,6 @@
name: "PR checks"
env:
GO111MODULE: auto
on:
push:
branches: [main, v1]
pull_request:
on: [push, pull_request]
jobs:
lint-js:
@@ -23,7 +17,25 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Check generated JavaScript
run: .github/workflows/script/check-js.sh
run: |
# Sanity check that repo is clean to start with
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then this workflow needs attention...
>&2 echo "Failed: Repo should be clean before testing!"
exit 1
fi
# Wipe the lib directory incase there are extra unnecessary files in there
rm -rf lib
# Generate the JavaScript files
npm run-script build
# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
>&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
git status
exit 1
fi
echo "Success: JavaScript files are up to date"
check-node-modules:
runs-on: ubuntu-latest
@@ -31,10 +43,27 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Check node modules up to date
run: .github/workflows/script/check-node-modules.sh
run: |
# Sanity check that repo is clean to start with
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then this workflow needs attention...
>&2 echo "Failed: Repo should be clean before testing!"
exit 1
fi
# Reinstall modules and then clean to remove absolute paths
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
npm ci
npm run removeNPMAbsolutePaths
# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
>&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
git status
exit 1
fi
echo "Success: node_modules are up to date"
npm-test:
needs: [check-js, check-node-modules]
strategy:
matrix:
os: [ubuntu-latest,macos-latest]
@@ -44,533 +73,3 @@ jobs:
- uses: actions/checkout@v2
- name: npm run-script test
run: npm run-script test
multi-language-repo_test-autodetect-languages:
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- uses: ./../action/init
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
TEST_MODE: true
- run: |
cd "$RUNNER_TEMP/codeql_databases"
# List all directories as there will be precisely one directory per database
# but there may be other files in this directory such as query suites.
if [ "$(ls -d */ | wc -l)" != 6 ] || \
[[ ! -d cpp ]] || \
[[ ! -d csharp ]] || \
[[ ! -d go ]] || \
[[ ! -d java ]] || \
[[ ! -d javascript ]] || \
[[ ! -d python ]]; then
echo "Did not find expected number of databases. Database dir contains: $(ls)"
exit 1
fi
multi-language-repo_test-custom-queries-and-remote-config:
needs: [check-js, check-node-modules]
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
tools: [~, latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- uses: ./../action/init
with:
tools: ${{ matrix.tools }}
languages: cpp,csharp,java,javascript,python
config-file: github/codeql-action/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
TEST_MODE: true
# Currently is not possible to analyze Go in conjunction with other languages in macos
multi-language-repo_test-go-custom-queries:
needs: [check-js, check-node-modules]
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/setup-go@v2
if: ${{ matrix.os == 'macos-latest' }}
with:
go-version: '^1.13.1'
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- uses: ./../action/init
with:
languages: go
config-file: ./.github/codeql/custom-queries.yml
- name: Build code
shell: bash
run: ./build.sh
- uses: ./../action/analyze
env:
TEST_MODE: true
go-custom-tracing:
needs: [check-js, check-node-modules]
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
runs-on: ${{ matrix.os }}
env:
CODEQL_EXTRACTOR_GO_BUILD_TRACING: "on"
steps:
- uses: actions/setup-go@v2
if: ${{ matrix.os == 'macos-latest' }}
with:
go-version: '^1.13.1'
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- uses: ./../action/init
with:
languages: go
- name: Build code
shell: bash
run: go build main.go
- uses: ./../action/analyze
env:
TEST_MODE: true
go-custom-tracing-autobuild:
needs: [check-js, check-node-modules]
# No need to test Go autobuild on multiple OSes since
# we're testing Go custom tracing with a manual build on all OSes.
runs-on: ubuntu-latest
env:
CODEQL_EXTRACTOR_GO_BUILD_TRACING: "on"
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- uses: ./../action/init
with:
languages: go
- uses: ./../action/autobuild
- uses: ./../action/analyze
env:
TEST_MODE: true
multi-language-repo_rubocop:
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Set up Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: 2.6
- name: Install Code Scanning integration
run: bundle add code-scanning-rubocop --version 0.3.0 --skip-install
- name: Install dependencies
run: bundle install
- name: Rubocop run
run: |
bash -c "
bundle exec rubocop --require code_scanning --format CodeScanning::SarifFormatter -o rubocop.sarif
[[ $? -ne 2 ]]
"
- uses: ./../action/upload-sarif
with:
sarif_file: rubocop.sarif
env:
TEST_MODE: true
test-proxy:
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
container:
image: ubuntu:18.04
options: --dns 127.0.0.1
services:
squid-proxy:
image: datadog/squid:latest
ports:
- 3128:3128
env:
https_proxy: http://squid-proxy:3128
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- uses: ./../action/init
with:
languages: javascript
- uses: ./../action/analyze
env:
TEST_MODE: true
runner-analyze-javascript-ubuntu:
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
# Pass --config-file here, but not for other jobs in this workflow.
# This means we're testing the config file parsing in the runner
# but not slowing down all jobs unnecessarily as it doesn't add much
# testing the parsing on different operating systems and languages.
runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-windows:
needs: [check-js, check-node-modules]
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages javascript --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-javascript-macos:
needs: [check-js, check-node-modules]
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Run init
run: |
runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages javascript --config-file ./.github/codeql/codeql-config.yml --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Run analyze
run: |
runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-ubuntu:
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
. ./codeql-runner/codeql-env.sh
$CODEQL_RUNNER dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-windows:
needs: [check-js, check-node-modules]
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
cat ./codeql-runner/codeql-env.sh | Invoke-Expression
& $Env:CODEQL_RUNNER dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-macos:
needs: [check-js, check-node-modules]
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
. ./codeql-runner/codeql-env.sh
$CODEQL_RUNNER dotnet build
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-ubuntu:
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-linux init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
run: |
../action/runner/dist/codeql-runner-linux autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-linux analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-windows:
needs: [check-js, check-node-modules]
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-win.exe init --repository $Env:GITHUB_REPOSITORY --languages csharp --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: powershell
run: |
../action/runner/dist/codeql-runner-win.exe autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-win.exe analyze --repository $Env:GITHUB_REPOSITORY --commit $Env:GITHUB_SHA --ref $Env:GITHUB_REF --github-url $Env:GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-analyze-csharp-autobuild-macos:
needs: [check-js, check-node-modules]
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Move codeql-action
shell: bash
run: |
mkdir ../action
mv * .github ../action/
mv ../action/tests/multi-language-repo/{*,.github} .
mv ../action/.github/workflows .github
- name: Build runner
run: |
cd ../action/runner
npm install
npm run build-runner
- name: Run init
run: |
../action/runner/dist/codeql-runner-macos init --repository $GITHUB_REPOSITORY --languages csharp --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
- name: Build code
shell: bash
run: |
../action/runner/dist/codeql-runner-macos autobuild
- name: Run analyze
run: |
../action/runner/dist/codeql-runner-macos analyze --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}
env:
TEST_MODE: true
runner-upload-sarif:
needs: [check-js, check-node-modules]
runs-on: ubuntu-latest
if: ${{ github.event_name != 'pull_request' || github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id }}
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- name: Upload with runner
run: |
# Deliberately don't use TEST_MODE here. This is specifically testing
# the compatibility with the API.
runner/dist/codeql-runner-linux upload --sarif-file src/testdata/empty-sarif.sarif --repository $GITHUB_REPOSITORY --commit $GITHUB_SHA --ref $GITHUB_REF --github-url $GITHUB_SERVER_URL --github-auth ${{ github.token }}

View File

@@ -1,133 +0,0 @@
name: Test Python Package Installation on Linux and Mac
on:
push:
branches: [main, v1]
pull_request:
jobs:
test-setup-python-scripts:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest]
include:
- test_dir: python-setup/tests/pipenv/requests-2
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 2
- test_dir: python-setup/tests/pipenv/requests-3
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 3
- test_dir: python-setup/tests/poetry/requests-2
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 2
- test_dir: python-setup/tests/poetry/requests-3
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 3
- test_dir: python-setup/tests/requirements/requests-2
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 2
- test_dir: python-setup/tests/requirements/requests-3
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 3
- test_dir: python-setup/tests/setup_py/requests-2
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 2
- test_dir: python-setup/tests/setup_py/requests-3
test_script: $GITHUB_WORKSPACE/python-setup/tests/check_requests_123.sh 3
# This one shouldn't fail, but also won't install packages
- test_dir: python-setup/tests/requirements/non-standard-location
test_script: test -z $LGTM_INDEX_IMPORT_PATH
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v2
- name: Initialize CodeQL
uses: ./init
id: init
with:
tools: latest
languages: python
setup-python-dependencies: false
- name: Test Auto Package Installation
run: |
set -x
$GITHUB_WORKSPACE/python-setup/install_tools.sh
cd $GITHUB_WORKSPACE/${{ matrix.test_dir }}
case ${{ matrix.os }} in
ubuntu-latest*) basePath="/opt";;
macos-latest*) basePath="/Users/runner";;
esac
echo ${basePath}
$GITHUB_WORKSPACE/python-setup/auto_install_packages.py "$(dirname ${{steps.init.outputs.codeql-path}})"
- name: Setup for extractor
run: |
echo $CODEQL_PYTHON
# only run if $CODEQL_PYTHON is set
if [ ! -z $CODEQL_PYTHON ]; then
$GITHUB_WORKSPACE/python-setup/tests/from_python_exe.py $CODEQL_PYTHON;
fi
- name: Verify packages installed
run: |
${{ matrix.test_script }}
test-setup-python-scripts-windows:
runs-on: windows-latest
strategy:
fail-fast: false
matrix:
include:
- test_dir: python-setup/tests/pipenv/requests-2
python_version: 2
- test_dir: python-setup/tests/pipenv/requests-3
python_version: 3
- test_dir: python-setup/tests/poetry/requests-2
python_version: 2
- test_dir: python-setup/tests/poetry/requests-3
python_version: 3
- test_dir: python-setup/tests/requirements/requests-2
python_version: 2
- test_dir: python-setup/tests/requirements/requests-3
python_version: 3
- test_dir: python-setup/tests/setup_py/requests-2
python_version: 2
- test_dir: python-setup/tests/setup_py/requests-3
python_version: 3
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v2
- name: Initialize CodeQL
uses: ./init
with:
tools: latest
languages: python
setup-python-dependencies: false
- name: Test Auto Package Installation
run: |
$cmd = $Env:GITHUB_WORKSPACE + "\\python-setup\\install_tools.ps1"
powershell -File $cmd
cd $Env:GITHUB_WORKSPACE\\${{ matrix.test_dir }}
$DefaultsPath = Join-Path (Join-Path $Env:GITHUB_WORKSPACE "src") "defaults.json"
$CodeQLBundleName = (Get-Content -Raw -Path $DefaultsPath | ConvertFrom-Json).bundleVersion
$CodeQLVersion = "0.0.0-" + $CodeQLBundleName.split("-")[-1]
py -3 $Env:GITHUB_WORKSPACE\\python-setup\\auto_install_packages.py C:\\hostedtoolcache\\windows\\CodeQL\\$CodeQLVersion\\x64\\codeql
- name: Setup for extractor
run: |
echo $Env:CODEQL_PYTHON
py -3 $Env:GITHUB_WORKSPACE\\python-setup\\tests\\from_python_exe.py $Env:CODEQL_PYTHON
- name: Verify packages installed
run: |
$cmd = $Env:GITHUB_WORKSPACE + "\\python-setup\\tests\\check_requests_123.ps1"
powershell -File $cmd ${{ matrix.python_version }}

View File

@@ -1,54 +0,0 @@
name: Release runner
on:
workflow_dispatch:
inputs:
bundle-tag:
description: 'Tag of the bundle release (e.g., "codeql-bundle-20200826")'
required: false
jobs:
release-runner:
runs-on: ubuntu-latest
env:
RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"
strategy:
matrix:
extension: ["linux", "macos", "win.exe"]
steps:
- uses: actions/checkout@v2
- name: Build runner
run: |
cd runner
npm install
npm run build-runner
- uses: actions/upload-artifact@v2
with:
name: codeql-runner-${{matrix.extension}}
path: runner/dist/codeql-runner-${{matrix.extension}}
- name: Resolve Upload URL for the release
if: ${{ github.event.inputs.bundle-tag != null }}
id: save_url
run: |
UPLOAD_URL=$(curl -sS \
"https://api.github.com/repos/${GITHUB_REPOSITORY}/releases/tags/${RELEASE_TAG}" \
-H "Accept: application/json" \
-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" | jq .upload_url | sed s/\"//g)
echo ${UPLOAD_URL}
echo "::set-output name=upload_url::${UPLOAD_URL}"
- name: Upload Platform Package
if: ${{ github.event.inputs.bundle-tag != null }}
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.save_url.outputs.upload_url }}
asset_path: runner/dist/codeql-runner-${{matrix.extension}}
asset_name: codeql-runner-${{matrix.extension}}
asset_content_type: application/octet-stream

View File

@@ -1,21 +0,0 @@
#!/bin/bash
set -eu
# Sanity check that repo is clean to start with
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then this workflow needs attention...
>&2 echo "Failed: Repo should be clean before testing!"
exit 1
fi
# Wipe the lib directory incase there are extra unnecessary files in there
rm -rf lib
# Generate the JavaScript files
npm run-script build
# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
>&2 echo "Failed: JavaScript files are not up to date. Run 'npm run-script build' to update"
git status
exit 1
fi
echo "Success: JavaScript files are up to date"

View File

@@ -1,21 +0,0 @@
#!/bin/bash
set -eu
# Sanity check that repo is clean to start with
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then this workflow needs attention...
>&2 echo "Failed: Repo should be clean before testing!"
exit 1
fi
# Reinstall modules and then clean to remove absolute paths
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
npm ci
npm run removeNPMAbsolutePaths
# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
>&2 echo "Failed: node_modules are not up to date. Run 'npm ci' and 'npm run removeNPMAbsolutePaths' to update"
git status
exit 1
fi
echo "Success: node_modules are up to date"

View File

@@ -1,73 +0,0 @@
#
# Split the CodeQL Bundle into platform bundles
#
# Instructions:
# 1. Upload the new codeql-bundle (codeql-bundle.tar.gz) as an asset of the
# release (codeql-bundle-20200826)
# 2. Take note of the CLI Release used by the bundle (e.g., v2.2.5)
# 3. Manually launch this workflow file (via the Actions UI) specifying
# - The CLI Release (e.g., v2.2.5)
# - The release tag (e.g., codeql-bundle-20200826)
# 4. If everything succeeds you should see 3 new assets.
#
name: Split Bundle
on:
workflow_dispatch:
inputs:
cli-release:
description: 'CodeQL CLI Release (e.g., "v2.2.5")'
required: true
bundle-tag:
description: 'Tag of the bundle release (e.g., "codeql-bundle-20200826")'
required: true
jobs:
build:
runs-on: ubuntu-latest
env:
CLI_RELEASE: "${{ github.event.inputs.cli-release }}"
RELEASE_TAG: "${{ github.event.inputs.bundle-tag }}"
strategy:
fail-fast: false
matrix:
platform: ["linux64", "osx64", "win64"]
steps:
- name: Resolve Upload URL for the release
id: save_url
run: |
UPLOAD_URL=$(curl -sS \
"https://api.github.com/repos/${GITHUB_REPOSITORY}/releases/tags/${RELEASE_TAG}" \
-H "Accept: application/json" \
-H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" | jq .upload_url | sed s/\"//g)
echo ${UPLOAD_URL}
echo "::set-output name=upload_url::${UPLOAD_URL}"
- name: Download CodeQL CLI and Bundle
run: |
wget --no-verbose "https://github.com/${GITHUB_REPOSITORY}/releases/download/${RELEASE_TAG}/codeql-bundle.tar.gz"
wget --no-verbose "https://github.com/github/codeql-cli-binaries/releases/download/${CLI_RELEASE}/codeql-${{matrix.platform}}.zip"
- name: Create Platform Package
# Replace the codeql-binaries with the platform specific ones
run: |
gunzip codeql-bundle.tar.gz
tar -f codeql-bundle.tar --delete codeql
unzip -q codeql-${{matrix.platform}}.zip
tar -f codeql-bundle.tar --append codeql
gzip codeql-bundle.tar
mv codeql-bundle.tar.gz codeql-bundle-${{matrix.platform}}.tar.gz
du -sh codeql-bundle-${{matrix.platform}}.tar.gz
- name: Upload Platform Package
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.save_url.outputs.upload_url }}
asset_path: ./codeql-bundle-${{matrix.platform}}.tar.gz
asset_name: codeql-bundle-${{matrix.platform}}.tar.gz
asset_content_type: application/tar+gzip

View File

@@ -12,7 +12,6 @@ on:
jobs:
update:
runs-on: ubuntu-latest
if: ${{ github.repository == 'github/codeql-action' }}
steps:
- uses: actions/checkout@v2
with:

View File

@@ -1,43 +0,0 @@
name: Update Supported Enterprise Server Versions
on:
schedule:
- cron: "0 0 * * *"
jobs:
update-supported-enterprise-server-versions:
runs-on: ubuntu-latest
steps:
- name: Setup Python
uses: actions/setup-python@v2
with:
python-version: "3.7"
- name: Checkout CodeQL Action
uses: actions/checkout@v2
- name: Checkout Enterprise Releases
uses: actions/checkout@v2
with:
repository: github/enterprise-releases
ssh-key: ${{ secrets.ENTERPRISE_RELEASES_SSH_KEY }}
path: ${{ github.workspace }}/enterprise-releases/
- name: Update Supported Enterprise Server Versions
run: |
cd ./.github/workflows/update-supported-enterprise-server-versions/
python3 -m pip install pipenv
pipenv install
pipenv run ./update.py
rm --recursive "$ENTERPRISE_RELEASES_PATH"
npm run build
env:
ENTERPRISE_RELEASES_PATH: ${{ github.workspace }}/enterprise-releases/
- name: Commit Changes
uses: peter-evans/create-pull-request@c7f493a8000b8aeb17a1332e326ba76b57cb83eb # v3.4.1
with:
commit-message: Update supported GitHub Enterprise Server versions.
title: Update supported GitHub Enterprise Server versions.
body: ""
author: GitHub <noreply@github.com>
branch: update-supported-enterprise-server-versions
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -1,9 +0,0 @@
[[source]]
name = "pypi"
url = "https://pypi.org/simple"
verify_ssl = true
[dev-packages]
[packages]
semver = "*"

View File

@@ -1,27 +0,0 @@
{
"_meta": {
"hash": {
"sha256": "e3ba923dcb4888e05de5448c18a732bf40197e80fabfa051a61c01b22c504879"
},
"pipfile-spec": 6,
"requires": {},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"semver": {
"hashes": [
"sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4",
"sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"
],
"index": "pypi",
"version": "==2.13.0"
}
},
"develop": {}
}

View File

@@ -1,43 +0,0 @@
#!/usr/bin/env python3
import datetime
import json
import os
import pathlib
import semver
_API_COMPATIBILITY_PATH = pathlib.Path(__file__).absolute().parents[3] / "src" / "api-compatibility.json"
_ENTERPRISE_RELEASES_PATH = pathlib.Path(os.environ["ENTERPRISE_RELEASES_PATH"])
_RELEASE_FILE_PATH = _ENTERPRISE_RELEASES_PATH / "releases.json"
_FIRST_SUPPORTED_RELEASE = semver.VersionInfo.parse("2.22.0") # Versions older than this did not include Code Scanning.
def main():
api_compatibility_data = json.loads(_API_COMPATIBILITY_PATH.read_text())
releases = json.loads(_RELEASE_FILE_PATH.read_text())
oldest_supported_release = None
newest_supported_release = semver.VersionInfo.parse(api_compatibility_data["maximumVersion"] + ".0")
for release_version_string, release_data in releases.items():
release_version = semver.VersionInfo.parse(release_version_string + ".0")
if release_version < _FIRST_SUPPORTED_RELEASE:
continue
if release_version > newest_supported_release:
feature_freeze_date = datetime.date.fromisoformat(release_data["feature_freeze"])
if feature_freeze_date < datetime.date.today() + datetime.timedelta(weeks=2):
newest_supported_release = release_version
if oldest_supported_release is None or release_version < oldest_supported_release:
end_of_life_date = datetime.date.fromisoformat(release_data["end"])
if end_of_life_date > datetime.date.today():
oldest_supported_release = release_version
api_compatibility_data = {
"minimumVersion": f"{oldest_supported_release.major}.{oldest_supported_release.minor}",
"maximumVersion": f"{newest_supported_release.major}.{newest_supported_release.minor}",
}
_API_COMPATIBILITY_PATH.write_text(json.dumps(api_compatibility_data, sort_keys=True) + "\n")
if __name__ == "__main__":
main()
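# Illustrative walk-through of the selection rules above; every date below is a
# made-up placeholder, assuming api-compatibility.json currently lists "2.22" as its
# maximumVersion and that the script runs on 2020-09-15.
example_releases = {
    "2.22": {"feature_freeze": "2020-05-26", "end": "2021-09-23"},  # end date in the future -> becomes minimumVersion
    "2.23": {"feature_freeze": "2020-09-22", "end": "2021-12-09"},  # freeze less than two weeks away -> becomes maximumVersion
    "2.24": {"feature_freeze": "2021-01-01", "end": "2022-06-01"},  # freeze too far out -> ignored for maximumVersion
}
# With that input the loop above would write:
#   {"maximumVersion": "2.23", "minimumVersion": "2.22"}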

View File

@@ -36,7 +36,6 @@ It is possible to run this action locally via [act](https://github.com/nektos/ac
```bash
CODEQL_LOCAL_RUN=true
GITHUB_SERVER_URL=https://github.com
# Optional, for better logging
GITHUB_JOB=<ANY_JOB_NAME>

View File

@@ -22,16 +22,7 @@ on:
push:
pull_request:
schedule:
# ┌───────────── minute (0 - 59)
# │ ┌───────────── hour (0 - 23)
# │ │ ┌───────────── day of the month (1 - 31)
# │ │ │ ┌───────────── month (1 - 12 or JAN-DEC)
# │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT)
# │ │ │ │ │
# │ │ │ │ │
# │ │ │ │ │
# * * * * *
- cron: '30 1 * * 0'
- cron: '0 0 * * 0'
jobs:
CodeQL-Build:
@@ -41,6 +32,17 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v2
with:
# Must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head of the pull request.
# Only include this option if you are running this workflow on pull requests.
fetch-depth: 2
# If this run was triggered by a pull request event then checkout
# the head of the pull request instead of the merge commit.
# Only include this step if you are running this workflow on pull requests.
- run: git checkout HEAD^2
if: ${{ github.event_name == 'pull_request' }}
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
@@ -96,16 +98,7 @@ Use the `config-file` parameter of the `init` action to enable the configuration
config-file: ./.github/codeql/codeql-config.yml
```
The configuration file can be located in a different repository. This is useful if you want to share the same configuration across multiple repositories. If the configuration file is in a private repository you can also specify an `external-repository-token` option. This should be a personal access token that has read access to any repositories containing referenced config files and queries.
```yaml
- uses: github/codeql-action/init@v1
with:
config-file: owner/repo/codeql-config.yml@branch
external-repository-token: ${{ secrets.EXTERNAL_REPOSITORY_TOKEN }}
```
For information on how to write a configuration file, see "[Using a custom configuration file](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration-file)."
The configuration file must be located within the local repository. For information on how to write a configuration file, see "[Using a custom configuration file](https://help.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#using-a-custom-configuration-file)."
If you only want to customise the queries used, you can specify them in your workflow instead of creating a config file, using the `queries` property of the `init` action:

View File

@@ -19,12 +19,12 @@ inputs:
add-snippets:
description: Specify whether or not to add code snippets to the output sarif file.
required: false
default: "false"
default: "true"
threads:
description: The number of threads to be used by CodeQL.
required: false
checkout_path:
description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
description: "The path at which the analyzed repository was checked out. Used to relativeize any absolute paths in the uploaded SARIF file."
required: false
default: ${{ github.workspace }}
token:

View File

@@ -1,5 +1,5 @@
name: 'CodeQL: Init'
description: 'Set up CodeQL'
description: 'Setup the CodeQL tracer'
author: 'GitHub'
inputs:
tools:
@@ -19,16 +19,6 @@ inputs:
queries:
description: Comma-separated list of additional queries to run. By default, this overrides the same setting in a configuration file; prefix with "+" to use both sets of queries.
required: false
external-repository-token:
description: A token for fetching external config files and queries if they reside in a private repository.
required: false
setup-python-dependencies:
description: Try to auto-install your python dependencies
required: true
default: 'true'
outputs:
codeql-path:
description: The path of the CodeQL binary used for analysis
runs:
using: 'node12'
main: '../lib/init-action.js'

lib/actions-util.js (generated, 513 lines)
View File

@@ -1,513 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
const yaml = __importStar(require("js-yaml"));
const api = __importStar(require("./api-client"));
const sharedEnv = __importStar(require("./shared-environment"));
const util_1 = require("./util");
/**
* Wrapper around core.getInput for inputs that always have a value.
* Also see getOptionalInput.
*
* This allows us to get stronger type checking of required/optional inputs
* and make behaviour more consistent between actions and the runner.
*/
function getRequiredInput(name) {
return core.getInput(name, { required: true });
}
exports.getRequiredInput = getRequiredInput;
/**
* Wrapper around core.getInput that converts empty inputs to undefined.
* Also see getRequiredInput.
*
* This allows us to get stronger type checking of required/optional inputs
* and make behaviour more consistent between actions and the runner.
*/
function getOptionalInput(name) {
const value = core.getInput(name);
return value.length > 0 ? value : undefined;
}
exports.getOptionalInput = getOptionalInput;
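// Illustrative usage of the two wrappers above (the input names are assumptions,
// not taken from this file): a required input throws if it is missing, while an
// optional one becomes undefined instead of the empty string core.getInput returns.
function exampleReadInputs() {
    const languages = getRequiredInput("languages"); // throws if the input is not set
    const configFile = getOptionalInput("config-file"); // string | undefined
    if (configFile !== undefined) {
        core.debug(`Using config file ${configFile}`);
    }
    return { languages, configFile };
}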
/**
* Get an environment parameter, but throw an error if it is not set.
*/
function getRequiredEnvParam(paramName) {
const value = process.env[paramName];
if (value === undefined || value.length === 0) {
throw new Error(`${paramName} environment variable must be set`);
}
core.debug(`${paramName}=${value}`);
return value;
}
exports.getRequiredEnvParam = getRequiredEnvParam;
function getTemporaryDirectory() {
const value = process.env["CODEQL_ACTION_TEMP"];
return value !== undefined && value !== ""
? value
: getRequiredEnvParam("RUNNER_TEMP");
}
exports.getTemporaryDirectory = getTemporaryDirectory;
/**
* Ensures all required environment variables are set in the context of a local run.
*/
function prepareLocalRunEnvironment() {
if (!util_1.isLocalRun()) {
return;
}
core.debug("Action is running locally.");
if (!process.env.GITHUB_JOB) {
core.exportVariable("GITHUB_JOB", "UNKNOWN-JOB");
}
if (!process.env.CODEQL_ACTION_ANALYSIS_KEY) {
core.exportVariable("CODEQL_ACTION_ANALYSIS_KEY", `LOCAL-RUN:${process.env.GITHUB_JOB}`);
}
}
exports.prepareLocalRunEnvironment = prepareLocalRunEnvironment;
/**
* Gets the SHA of the commit that is currently checked out.
*/
exports.getCommitOid = async function () {
// Try to use git to get the current commit SHA. If that fails then
// log but otherwise silently fall back to using the SHA from the environment.
// The only time these two values will differ is during analysis of a PR when
// the workflow has changed the current commit to the head commit instead of
// the merge commit, which must mean that git is available.
// Even if this does go wrong, it's not a huge problem for the alerts to be
// reported on the merge commit.
try {
let commitOid = "";
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), ["rev-parse", "HEAD"], {
silent: true,
listeners: {
stdout: (data) => {
commitOid += data.toString();
},
stderr: (data) => {
process.stderr.write(data);
},
},
}).exec();
return commitOid.trim();
}
catch (e) {
core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
return getRequiredEnvParam("GITHUB_SHA");
}
};
function isObject(o) {
return o !== null && typeof o === "object";
}
const GLOB_PATTERN = new RegExp("(\\*\\*?)");
function escapeRegExp(string) {
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
}
function patternToRegExp(value) {
return new RegExp(`^${value
.toString()
.split(GLOB_PATTERN)
.reduce(function (arr, cur) {
if (cur === "**") {
arr.push(".*?");
}
else if (cur === "*") {
arr.push("[^/]*?");
}
else if (cur) {
arr.push(escapeRegExp(cur));
}
return arr;
}, [])
.join("")}$`);
}
// this function should return true if patternA is a superset of patternB
// e.g: * is a superset of main-* but main-* is not a superset of *.
function patternIsSuperset(patternA, patternB) {
return patternToRegExp(patternA).test(patternB);
}
exports.patternIsSuperset = patternIsSuperset;
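// Concrete checks of the relation documented above (illustrative only, not part of
// the original file): "*" is a superset of "main-*", but "main-*" is not a superset of "*".
console.assert(patternIsSuperset("*", "main-*") === true);
console.assert(patternIsSuperset("main-*", "*") === false);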
function branchesToArray(branches) {
if (typeof branches === "string") {
return [branches];
}
if (Array.isArray(branches)) {
if (branches.length === 0) {
return "**";
}
return branches;
}
return "**";
}
function toCodedErrors(errors) {
return Object.entries(errors).reduce((acc, [key, value]) => {
acc[key] = { message: value, code: key };
return acc;
}, {});
}
// code to send back via status report
// message to add as a warning annotation to the run
exports.WorkflowErrors = toCodedErrors({
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
});
function getWorkflowErrors(doc) {
var _a, _b, _c, _d, _e, _f, _g, _h;
const errors = [];
const jobName = process.env.GITHUB_JOB;
if (jobName) {
const job = (_b = (_a = doc) === null || _a === void 0 ? void 0 : _a.jobs) === null || _b === void 0 ? void 0 : _b[jobName];
const steps = (_c = job) === null || _c === void 0 ? void 0 : _c.steps;
if (Array.isArray(steps)) {
for (const step of steps) {
// this was advice that we used to give in the README
// we actually want to run the analysis on the merge commit
// to produce results that are more inline with expectations
// (i.e: this is what will happen if you merge this PR)
// and avoid some race conditions
if (((_d = step) === null || _d === void 0 ? void 0 : _d.run) === "git checkout HEAD^2") {
errors.push(exports.WorkflowErrors.CheckoutWrongHead);
break;
}
}
}
}
let missingPush = false;
if (doc.on === undefined) {
// this is not a valid config
}
else if (typeof doc.on === "string") {
if (doc.on === "pull_request") {
missingPush = true;
}
}
else if (Array.isArray(doc.on)) {
const hasPush = doc.on.includes("push");
const hasPullRequest = doc.on.includes("pull_request");
if (hasPullRequest && !hasPush) {
missingPush = true;
}
}
else if (isObject(doc.on)) {
const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
const hasPullRequest = Object.prototype.hasOwnProperty.call(doc.on, "pull_request");
if (!hasPush && hasPullRequest) {
missingPush = true;
}
if (hasPush && hasPullRequest) {
const paths = (_e = doc.on.push) === null || _e === void 0 ? void 0 : _e.paths;
// if you specify paths or paths-ignore you can end up with commits that have no baseline
// if they didn't change any files
// currently we cannot go back through the history and find the most recent baseline
if (Array.isArray(paths) && paths.length > 0) {
errors.push(exports.WorkflowErrors.PathsSpecified);
}
const pathsIgnore = (_f = doc.on.push) === null || _f === void 0 ? void 0 : _f["paths-ignore"];
if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
}
}
// if doc.on.pull_request is null that means 'all branches'
// if doc.on.pull_request is undefined that means 'off'
// we only want to check for mismatched branches if pull_request is on.
if (doc.on.pull_request !== undefined) {
const push = branchesToArray((_g = doc.on.push) === null || _g === void 0 ? void 0 : _g.branches);
if (push !== "**") {
const pull_request = branchesToArray((_h = doc.on.pull_request) === null || _h === void 0 ? void 0 : _h.branches);
if (pull_request !== "**") {
const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
if (difference.length > 0) {
// there are branches in pull_request that may not have a baseline
// because we are not building them on push
errors.push(exports.WorkflowErrors.MismatchedBranches);
}
}
else if (push.length > 0) {
// push is set up to run on a subset of branches
// and you could open a PR against a branch with no baseline
errors.push(exports.WorkflowErrors.MismatchedBranches);
}
}
}
}
if (missingPush) {
errors.push(exports.WorkflowErrors.MissingPushHook);
}
return errors;
}
exports.getWorkflowErrors = getWorkflowErrors;
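// Illustrative inputs and outputs (assumed configs, mirroring the tests later on this page):
//   getWorkflowErrors({ on: ["pull_request"] })
//     -> [WorkflowErrors.MissingPushHook]
//   getWorkflowErrors({ on: { push: { branches: ["main"] }, pull_request: { branches: ["feature"] } } })
//     -> [WorkflowErrors.MismatchedBranches]
//   getWorkflowErrors({ on: { push: { branches: ["main"], paths: ["src/*"] }, pull_request: { branches: ["main"] } } })
//     -> [WorkflowErrors.PathsSpecified]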
async function validateWorkflow() {
let workflow;
try {
workflow = await getWorkflow();
}
catch (e) {
return `error: getWorkflow() failed: ${e.toString()}`;
}
let workflowErrors;
try {
workflowErrors = getWorkflowErrors(workflow);
}
catch (e) {
return `error: getWorkflowErrors() failed: ${e.toString()}`;
}
if (workflowErrors.length > 0) {
let message;
try {
message = formatWorkflowErrors(workflowErrors);
}
catch (e) {
return `error: formatWorkflowErrors() failed: ${e.toString()}`;
}
core.warning(message);
}
return formatWorkflowCause(workflowErrors);
}
exports.validateWorkflow = validateWorkflow;
function formatWorkflowErrors(errors) {
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
const errorsList = errors.map((e) => e.message).join(" ");
return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`;
}
exports.formatWorkflowErrors = formatWorkflowErrors;
function formatWorkflowCause(errors) {
if (errors.length === 0) {
return undefined;
}
return errors.map((e) => e.code).join(",");
}
exports.formatWorkflowCause = formatWorkflowCause;
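// Illustrative formatting (assumed inputs, matching the tests later on this page):
//   formatWorkflowErrors([WorkflowErrors.CheckoutWrongHead])
//     -> "1 issue was detected with this workflow: git checkout HEAD^2 is no longer necessary. ..."
//   formatWorkflowCause([WorkflowErrors.CheckoutWrongHead, WorkflowErrors.PathsSpecified])
//     -> "CheckoutWrongHead,PathsSpecified"
//   formatWorkflowCause([]) -> undefined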
async function getWorkflow() {
const relativePath = await getWorkflowPath();
const absolutePath = path.join(getRequiredEnvParam("GITHUB_WORKSPACE"), relativePath);
return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
}
exports.getWorkflow = getWorkflow;
/**
* Get the path of the currently executing workflow.
*/
async function getWorkflowPath() {
if (util_1.isLocalRun()) {
return getRequiredEnvParam("WORKFLOW_PATH");
}
const repo_nwo = getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
const owner = repo_nwo[0];
const repo = repo_nwo[1];
const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID"));
const apiClient = api.getActionsApiClient();
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id", {
owner,
repo,
run_id,
});
const workflowUrl = runsResponse.data.workflow_url;
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
return workflowResponse.data.path;
}
/**
* Get the workflow run ID.
*/
function getWorkflowRunID() {
const workflowRunID = parseInt(getRequiredEnvParam("GITHUB_RUN_ID"), 10);
if (Number.isNaN(workflowRunID)) {
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
}
return workflowRunID;
}
exports.getWorkflowRunID = getWorkflowRunID;
/**
* Get the analysis key parameter for the current job.
*
* This will combine the workflow path and current job name.
* Computing this the first time requires making requests to
* the github API, but after that the result will be cached.
*/
async function getAnalysisKey() {
const analysisKeyEnvVar = "CODEQL_ACTION_ANALYSIS_KEY";
let analysisKey = process.env[analysisKeyEnvVar];
if (analysisKey !== undefined) {
return analysisKey;
}
const workflowPath = await getWorkflowPath();
const jobName = getRequiredEnvParam("GITHUB_JOB");
analysisKey = `${workflowPath}:${jobName}`;
core.exportVariable(analysisKeyEnvVar, analysisKey);
return analysisKey;
}
exports.getAnalysisKey = getAnalysisKey;
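// Illustrative values (assumed, not from a real run): for a workflow at
// ".github/workflows/codeql-analysis.yml" with GITHUB_JOB="analyze", the key is
// ".github/workflows/codeql-analysis.yml:analyze"; it is exported as
// CODEQL_ACTION_ANALYSIS_KEY so later calls skip the API lookup.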
/**
* Get the ref currently being analyzed.
*/
async function getRef() {
// Will be in the form "refs/heads/master" on a push event
// or in the form "refs/pull/N/merge" on a pull_request event
const ref = getRequiredEnvParam("GITHUB_REF");
// For pull request refs we want to detect whether the workflow
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
// than the 'merge' ref. If so, we want to convert the ref that
// we report back.
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
const checkoutSha = await exports.getCommitOid();
if (pull_ref_regex.test(ref) &&
checkoutSha !== getRequiredEnvParam("GITHUB_SHA")) {
return ref.replace(pull_ref_regex, "refs/pull/$1/head");
}
else {
return ref;
}
}
exports.getRef = getRef;
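// Illustrative behaviour (assumed values): with GITHUB_REF="refs/pull/123/merge" and a
// workflow that has since checked out a commit other than GITHUB_SHA (e.g. via
// `git checkout HEAD^2`), this returns "refs/pull/123/head"; otherwise the ref is
// returned unchanged.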
/**
* Compose a StatusReport.
*
* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
* @param status The status. Must be 'success', 'failure', or 'starting'
* @param startedAt The time this action started executing.
* @param cause Cause of failure (only supply if status is 'failure')
* @param exception Exception (only supply if status is 'failure')
*/
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
const commitOid = process.env["GITHUB_SHA"] || "";
const ref = await getRef();
const workflowRunIDStr = process.env["GITHUB_RUN_ID"];
let workflowRunID = -1;
if (workflowRunIDStr) {
workflowRunID = parseInt(workflowRunIDStr, 10);
}
const workflowName = process.env["GITHUB_WORKFLOW"] || "";
const jobName = process.env["GITHUB_JOB"] || "";
const analysis_key = await getAnalysisKey();
let workflowStartedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
if (workflowStartedAt === undefined) {
workflowStartedAt = actionStartedAt.toISOString();
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
}
// If running locally then the GITHUB_ACTION_REF cannot be trusted as it may be for the previous action
// See https://github.com/actions/runner/issues/803
const actionRef = isRunningLocalAction()
? undefined
: process.env["GITHUB_ACTION_REF"];
const statusReport = {
workflow_run_id: workflowRunID,
workflow_name: workflowName,
job_name: jobName,
analysis_key,
commit_oid: commitOid,
ref,
action_name: actionName,
action_ref: actionRef,
action_oid: "unknown",
started_at: workflowStartedAt,
action_started_at: actionStartedAt.toISOString(),
status,
};
// Add optional parameters
if (cause) {
statusReport.cause = cause;
}
if (exception) {
statusReport.exception = exception;
}
if (status === "success" || status === "failure" || status === "aborted") {
statusReport.completed_at = new Date().toISOString();
}
const matrix = getRequiredInput("matrix");
if (matrix) {
statusReport.matrix_vars = matrix;
}
return statusReport;
}
exports.createStatusReportBase = createStatusReportBase;
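// Illustrative call (hypothetical, sketching the shape of the returned report):
//   const report = await createStatusReportBase("init", "starting", new Date());
//   // -> { workflow_run_id, workflow_name, job_name, analysis_key, commit_oid, ref,
//   //      action_name: "init", status: "starting", started_at, action_started_at, ... }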
function isHTTPError(arg) {
var _a;
return ((_a = arg) === null || _a === void 0 ? void 0 : _a.status) !== undefined && Number.isInteger(arg.status);
}
const GENERIC_403_MSG = "The repo on which this action is running is not opted-in to CodeQL code scanning.";
const GENERIC_404_MSG = "Not authorized to use the CodeQL code scanning feature on this repo.";
const OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
/**
* Send a status report to the code_scanning/analysis/status endpoint.
*
* Optionally checks the response from the API endpoint and sets the action
* as failed if the status report failed. This is only expected to be used
* when sending a 'starting' report.
*
* Returns whether sending the status report was successful or not.
*/
async function sendStatusReport(statusReport) {
if (util_1.isLocalRun()) {
core.debug("Not sending status report because this is a local run");
return true;
}
const statusReportJSON = JSON.stringify(statusReport);
core.debug(`Sending status report: ${statusReportJSON}`);
const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const client = api.getActionsApiClient();
try {
await client.request("PUT /repos/:owner/:repo/code-scanning/analysis/status", {
owner,
repo,
data: statusReportJSON,
});
return true;
}
catch (e) {
console.log(e);
if (isHTTPError(e)) {
switch (e.status) {
case 403:
core.setFailed(e.message || GENERIC_403_MSG);
return false;
case 404:
core.setFailed(GENERIC_404_MSG);
return false;
case 422:
// schema incompatibility when reporting status
// this means that this action version is no longer compatible with the API
// we still want to continue as it is likely the analysis endpoint will work
if (getRequiredEnvParam("GITHUB_SERVER_URL") !== util_1.GITHUB_DOTCOM_URL) {
core.debug(INCOMPATIBLE_MSG);
}
else {
core.debug(OUT_OF_DATE_MSG);
}
return true;
}
}
// something else has gone wrong and the request/response will be logged by octokit
// it's possible this is a transient error and we should continue scanning
core.error("An unexpected error occurred when sending code scanning status report.");
return true;
}
}
exports.sendStatusReport = sendStatusReport;
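// Sketch of a typical calling pattern (assumed usage):
//   const ok = await sendStatusReport(
//     await createStatusReportBase("init", "starting", startedAt)
//   );
//   if (!ok) return; // a 403/404 response has already failed the action via core.setFailed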
// Is the current action executing a local copy (i.e. we're running a workflow on the codeql-action repo itself)
// as opposed to running a remote action (i.e. when another repo references us)
function isRunningLocalAction() {
const relativeScriptPath = getRelativeScriptPath();
return (relativeScriptPath.startsWith("..") || path.isAbsolute(relativeScriptPath));
}
exports.isRunningLocalAction = isRunningLocalAction;
// Get the location where the action is running from.
// This can be used to get the action's name or to tell if we're running a local action.
function getRelativeScriptPath() {
const runnerTemp = getRequiredEnvParam("RUNNER_TEMP");
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
return path.relative(actionsDirectory, __filename);
}
exports.getRelativeScriptPath = getRelativeScriptPath;
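// Illustrative paths (assumed): with RUNNER_TEMP="/home/runner/work/_temp" the actions
// directory is "/home/runner/work/_actions". A checked-out copy of this repo resolves
// outside that directory, so the relative path starts with ".." and
// isRunningLocalAction() returns true; a referenced remote action resolves inside it
// and returns false.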
//# sourceMappingURL=actions-util.js.map

File diff suppressed because one or more lines are too long

402
lib/actions-util.test.js generated
View File

@@ -1,402 +0,0 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const yaml = __importStar(require("js-yaml"));
const sinon_1 = __importDefault(require("sinon"));
const actionsutil = __importStar(require("./actions-util"));
const testing_utils_1 = require("./testing-utils");
function errorCodes(actual, expected) {
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
}
testing_utils_1.setupTests(ava_1.default);
ava_1.default("getRef() throws on the empty string", async (t) => {
process.env["GITHUB_REF"] = "";
await t.throwsAsync(actionsutil.getRef);
});
ava_1.default("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
const expectedRef = "refs/pull/1/merge";
const currentSha = "a".repeat(40);
process.env["GITHUB_REF"] = expectedRef;
process.env["GITHUB_SHA"] = currentSha;
sinon_1.default.stub(actionsutil, "getCommitOid").resolves(currentSha);
const actualRef = await actionsutil.getRef();
t.deepEqual(actualRef, expectedRef);
});
ava_1.default("getRef() returns head PR ref if GITHUB_SHA not currently checked out", async (t) => {
process.env["GITHUB_REF"] = "refs/pull/1/merge";
process.env["GITHUB_SHA"] = "a".repeat(40);
sinon_1.default.stub(actionsutil, "getCommitOid").resolves("b".repeat(40));
const actualRef = await actionsutil.getRef();
t.deepEqual(actualRef, "refs/pull/1/head");
});
ava_1.default("getAnalysisKey() when a local run", async (t) => {
process.env.CODEQL_LOCAL_RUN = "true";
process.env.CODEQL_ACTION_ANALYSIS_KEY = "";
process.env.GITHUB_JOB = "";
actionsutil.prepareLocalRunEnvironment();
const actualAnalysisKey = await actionsutil.getAnalysisKey();
t.deepEqual(actualAnalysisKey, "LOCAL-RUN:UNKNOWN-JOB");
});
ava_1.default("prepareEnvironment() when a local run", (t) => {
process.env.CODEQL_LOCAL_RUN = "false";
process.env.GITHUB_JOB = "YYY";
process.env.CODEQL_ACTION_ANALYSIS_KEY = "TEST";
actionsutil.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, "YYY");
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "TEST");
process.env.CODEQL_LOCAL_RUN = "true";
actionsutil.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, "YYY");
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "TEST");
process.env.CODEQL_ACTION_ANALYSIS_KEY = "";
actionsutil.prepareLocalRunEnvironment();
// updated
t.deepEqual(process.env.GITHUB_JOB, "YYY");
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:YYY");
process.env.GITHUB_JOB = "";
process.env.CODEQL_ACTION_ANALYSIS_KEY = "";
actionsutil.prepareLocalRunEnvironment();
// updated
t.deepEqual(process.env.GITHUB_JOB, "UNKNOWN-JOB");
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:UNKNOWN-JOB");
});
ava_1.default("getWorkflowErrors() when on is empty", (t) => {
const errors = actionsutil.getWorkflowErrors({ on: {} });
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("getWorkflowErrors() when on.push is an array missing push", (t) => {
const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook]));
});
ava_1.default("getWorkflowErrors() when on.push is valid", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: ["push", "pull_request"],
});
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("getWorkflowErrors() when on.push is a valid superset", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: ["push", "pull_request", "schedule"],
});
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("getWorkflowErrors() when on.push should not have a path", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["main"], paths: ["test/*"] },
pull_request: { branches: ["main"] },
},
});
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified]));
});
ava_1.default("getWorkflowErrors() when on.push is a correct object", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
});
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("getWorkflowErrors() when on.pull_requests is a string", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
});
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
});
ava_1.default("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
});
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
on:
push:
pull_request:
`));
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("getWorkflowErrors() when on.push is mismatched", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["main"] },
pull_request: { branches: ["feature"] },
},
});
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
});
ava_1.default("getWorkflowErrors() when on.push is not mismatched", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["main", "feature"] },
pull_request: { branches: ["main"] },
},
});
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["main"] },
pull_request: { branches: ["main", "feature"] },
},
});
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
});
ava_1.default("getWorkflowErrors() for a range of malformed workflows", (t) => {
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: {
push: 1,
pull_request: 1,
},
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: 1,
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: [1],
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { 1: 1 },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: 1 },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: [1] },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: { steps: 1 } },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: 1,
jobs: { test: [undefined] },
}), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
on: {
push: {
branches: 1,
},
pull_request: {
branches: 1,
},
},
}), []));
});
ava_1.default("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
on:
push:
branches: ["main"]
pull_request:
`));
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
});
ava_1.default("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["feature/*"] },
pull_request: { branches: "feature/moose" },
},
});
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
const errors = actionsutil.getWorkflowErrors({
on: {
push: { branches: ["feature/moose"] },
pull_request: { branches: "feature/*" },
},
});
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
});
ava_1.default("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
process.env.GITHUB_JOB = "test";
const errors = actionsutil.getWorkflowErrors({
on: ["push", "pull_request"],
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
});
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
});
ava_1.default("formatWorkflowErrors() when there is one error", (t) => {
const message = actionsutil.formatWorkflowErrors([
actionsutil.WorkflowErrors.CheckoutWrongHead,
]);
t.true(message.startsWith("1 issue was detected with this workflow:"));
});
ava_1.default("formatWorkflowErrors() when there are multiple errors", (t) => {
const message = actionsutil.formatWorkflowErrors([
actionsutil.WorkflowErrors.CheckoutWrongHead,
actionsutil.WorkflowErrors.PathsSpecified,
]);
t.true(message.startsWith("2 issues were detected with this workflow:"));
});
ava_1.default("formatWorkflowCause() with no errors", (t) => {
const message = actionsutil.formatWorkflowCause([]);
t.deepEqual(message, undefined);
});
ava_1.default("formatWorkflowCause()", (t) => {
const message = actionsutil.formatWorkflowCause([
actionsutil.WorkflowErrors.CheckoutWrongHead,
actionsutil.WorkflowErrors.PathsSpecified,
]);
t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
t.deepEqual(actionsutil.formatWorkflowCause([]), undefined);
});
ava_1.default("patternIsSuperset()", (t) => {
t.false(actionsutil.patternIsSuperset("main-*", "main"));
t.true(actionsutil.patternIsSuperset("*", "*"));
t.true(actionsutil.patternIsSuperset("*", "main-*"));
t.false(actionsutil.patternIsSuperset("main-*", "*"));
t.false(actionsutil.patternIsSuperset("main-*", "main"));
t.true(actionsutil.patternIsSuperset("main", "main"));
t.false(actionsutil.patternIsSuperset("*", "feature/*"));
t.true(actionsutil.patternIsSuperset("**", "feature/*"));
t.false(actionsutil.patternIsSuperset("feature-*", "**"));
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**/d"));
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**"));
t.true(actionsutil.patternIsSuperset("a/**", "a/**/c"));
t.true(actionsutil.patternIsSuperset("a/**/c", "a/main-**/c"));
t.false(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/d/**/c"));
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/c/**/c"));
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/d/**/c"));
t.false(actionsutil.patternIsSuperset("a/**/c/d/**/c", "a/**/b/**/c"));
t.false(actionsutil.patternIsSuperset("a/main-**/c", "a/**/c"));
t.true(actionsutil.patternIsSuperset("/robin/*/release/*", "/robin/moose/release/goose"));
t.false(actionsutil.patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*"));
});
ava_1.default("getWorkflowErrors() when branches contain dots", (t) => {
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
on:
push:
branches: [4.1, master]
pull_request:
# The branches below must be a subset of the branches above
branches: [4.1, master]
`));
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("getWorkflowErrors() when on.push has a trailing comma", (t) => {
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
on:
push:
branches: [master, ]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
`));
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
process.env.GITHUB_JOB = "test";
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
on:
push:
branches: [master]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
jobs:
test:
steps:
- run: "git checkout HEAD^2"
test2:
steps:
- run: "git checkout HEAD^2"
test3:
steps: []
`));
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
});
ava_1.default("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
process.env.GITHUB_JOB = "test3";
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
on:
push:
branches: [master]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
jobs:
test:
steps:
- run: "git checkout HEAD^2"
test2:
steps:
- run: "git checkout HEAD^2"
test3:
steps: []
`));
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("getWorkflowErrors() when on is missing", (t) => {
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
`));
t.deepEqual(...errorCodes(errors, []));
});
ava_1.default("getWorkflowErrors() with a different on setup", (t) => {
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
on: "workflow_dispatch"
`)), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
on: [workflow_dispatch]
`)), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
on:
workflow_dispatch: {}
`)), []));
});
ava_1.default("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
on:
push:
branches: [master]
`)), []));
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
name: "CodeQL"
on: ["push"]
`)), []));
});
//# sourceMappingURL=actions-util.test.js.map

File diff suppressed because one or more lines are too long

26
lib/analysis-paths.js generated
View File

@@ -1,18 +1,10 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const path = __importStar(require("path"));
function isInterpretedLanguage(language) {
return language === "javascript" || language === "python";
}
// Matches a string containing only characters that are legal to include in paths on windows.
exports.legalWindowsPathCharactersRegex = /^[^<>:"|?]*$/;
exports.legalWindowsPathCharactersRegex = /^[^<>:"\|?]*$/;
// Builds an environment variable suitable for LGTM_INDEX_INCLUDE or LGTM_INDEX_EXCLUDE
function buildIncludeExcludeEnvVar(paths) {
// Ignore anything containing a *
@@ -28,7 +20,7 @@ function printPathFiltersWarning(config, logger) {
// If any other languages are detected/configured then show a warning.
if ((config.paths.length !== 0 || config.pathsIgnore.length !== 0) &&
!config.languages.every(isInterpretedLanguage)) {
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for JavaScript and Python');
logger.warning('The "paths"/"paths-ignore" fields of the config only have effect for Javascript and Python');
}
}
exports.printPathFiltersWarning = printPathFiltersWarning;
@@ -43,18 +35,8 @@ function includeAndExcludeAnalysisPaths(config) {
if (config.paths.length !== 0) {
process.env["LGTM_INDEX_INCLUDE"] = buildIncludeExcludeEnvVar(config.paths);
}
// If the temporary or tools directory is in the working directory ignore that too.
const tempRelativeToWorking = path.relative(process.cwd(), config.tempDir);
const toolsRelativeToWorking = path.relative(process.cwd(), config.toolCacheDir);
let pathsIgnore = config.pathsIgnore;
if (!tempRelativeToWorking.startsWith("..")) {
pathsIgnore = pathsIgnore.concat(tempRelativeToWorking);
}
if (!toolsRelativeToWorking.startsWith("..")) {
pathsIgnore = pathsIgnore.concat(toolsRelativeToWorking);
}
if (pathsIgnore.length !== 0) {
process.env["LGTM_INDEX_EXCLUDE"] = buildIncludeExcludeEnvVar(pathsIgnore);
if (config.pathsIgnore.length !== 0) {
process.env["LGTM_INDEX_EXCLUDE"] = buildIncludeExcludeEnvVar(config.pathsIgnore);
}
// The 'LGTM_INDEX_FILTERS' environment variable controls which files are
// extracted or ignored. It does not control which directories are traversed.

View File

@@ -1 +1 @@
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;;;;;;;;AAAA,2CAA6B;AAK7B,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,cAAc,CAAC;AAE9D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,oEAAoE;IACpE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,4FAA4F,CAC7F,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,mFAAmF;IACnF,MAAM,qBAAqB,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;IAC3E,MAAM,sBAAsB,GAAG,IAAI,CAAC,QAAQ,CAC1C,OAAO,CAAC,GAAG,EAAE,EACb,MAAM,CAAC,YAAY,CACpB,CAAC;IACF,IAAI,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrC,IAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC3C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KACzD;IACD,IAAI,CAAC,sBAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;QAC5C,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;KAC1D;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,WAAW,CAAC,CAAC;KAC5E;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AArCD,wEAqCC"}
{"version":3,"file":"analysis-paths.js","sourceRoot":"","sources":["../src/analysis-paths.ts"],"names":[],"mappings":";;AAGA,SAAS,qBAAqB,CAAC,QAAQ;IACrC,OAAO,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,QAAQ,CAAC;AAC5D,CAAC;AAED,6FAA6F;AAChF,QAAA,+BAA+B,GAAG,eAAe,CAAC;AAE/D,uFAAuF;AACvF,SAAS,yBAAyB,CAAC,KAAe;IAChD,iCAAiC;IACjC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEnD,uDAAuD;IACvD,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;QAChC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,uCAA+B,CAAC,CAAC,CAAC;KACvE;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED,SAAgB,uBAAuB,CACrC,MAA0B,EAC1B,MAAc;IAEd,oEAAoE;IACpE,sEAAsE;IACtE,IACE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,CAAC;QAC9D,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,qBAAqB,CAAC,EAC9C;QACA,MAAM,CAAC,OAAO,CACZ,4FAA4F,CAC7F,CAAC;KACH;AACH,CAAC;AAdD,0DAcC;AAED,SAAgB,8BAA8B,CAAC,MAA0B;IACvE,0EAA0E;IAC1E,+DAA+D;IAC/D,sEAAsE;IACtE,qDAAqD;IACrD,gFAAgF;IAChF,sEAAsE;IACtE,sDAAsD;IACtD,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAC7B,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAC7E;IACD,IAAI,MAAM,CAAC,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QACnC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,yBAAyB,CAC3D,MAAM,CAAC,WAAW,CACnB,CAAC;KACH;IAED,yEAAyE;IACzE,6EAA6E;IAC7E,wDAAwD;IACxD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/D,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACxD;AACH,CAAC;AA1BD,wEA0BC"}

View File

@@ -1,4 +1,7 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
@@ -6,11 +9,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava"));
const analysisPaths = __importStar(require("./analysis-paths"));
const testing_utils_1 = require("./testing-utils");
@@ -27,7 +26,6 @@ ava_1.default("emptyPaths", async (t) => {
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
@@ -46,7 +44,6 @@ ava_1.default("nonEmptyPaths", async (t) => {
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], "path1\npath2");
@@ -54,24 +51,4 @@ ava_1.default("nonEmptyPaths", async (t) => {
t.is(process.env["LGTM_INDEX_FILTERS"], "include:path1\ninclude:path2\ninclude:**/path3\nexclude:path4\nexclude:path5\nexclude:path6/**");
});
});
ava_1.default("exclude temp dir", async (t) => {
return await util.withTmpDir(async (toolCacheDir) => {
const tempDir = path.join(process.cwd(), "codeql-runner-temp");
const config = {
languages: [],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir,
toolCacheDir,
codeQLCmd: "",
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
};
analysisPaths.includeAndExcludeAnalysisPaths(config);
t.is(process.env["LGTM_INDEX_INCLUDE"], undefined);
t.is(process.env["LGTM_INDEX_EXCLUDE"], "codeql-runner-temp");
t.is(process.env["LGTM_INDEX_FILTERS"], undefined);
});
});
//# sourceMappingURL=analysis-paths.test.js.map

View File

@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2CAA6B;AAE7B,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;SACzE,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;SACzE,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACnC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE;QAClD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,oBAAoB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO;YACP,YAAY;YACZ,SAAS,EAAE,EAAE;YACb,aAAa,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAwB;SACzE,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,oBAAoB,CAAC,CAAC;QAC9D,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7B,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,WAAW,EAAE,EAAE;YACf,KAAK,EAAE,EAAE;YACT,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACrD,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG;YACb,SAAS,EAAE,EAAE;YACb,OAAO,EAAE,EAAE;YACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;YAC3C,iBAAiB,EAAE,EAAE;YACrB,OAAO,EAAE,MAAM;YACf,YAAY,EAAE,MAAM;YACpB,SAAS,EAAE,EAAE;SACd,CAAC;QACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;QACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;QACxD,CAAC,CAAC,EAAE,CACF,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EACjC,gGAAgG,CACjG,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

71
lib/analyze-action.js generated
View File

@@ -7,98 +7,49 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze");
const config_utils_1 = require("./config-utils");
const logging_1 = require("./logging");
const upload_lib = __importStar(require("./upload-lib"));
const repository_1 = require("./repository");
const util = __importStar(require("./util"));
async function sendStatusReport(startedAt, stats, error) {
var _a, _b, _c;
const status = ((_a = stats) === null || _a === void 0 ? void 0 : _a.analyze_failure_language) !== undefined || error !== undefined
? "failure"
: "success";
const statusReportBase = await actionsUtil.createStatusReportBase("finish", status, startedAt, (_b = error) === null || _b === void 0 ? void 0 : _b.message, (_c = error) === null || _c === void 0 ? void 0 : _c.stack);
const statusReportBase = await util.createStatusReportBase("finish", status, startedAt, (_b = error) === null || _b === void 0 ? void 0 : _b.message, (_c = error) === null || _c === void 0 ? void 0 : _c.stack);
const statusReport = {
...statusReportBase,
...(stats || {}),
};
await actionsUtil.sendStatusReport(statusReport);
await util.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
let stats = undefined;
let config = undefined;
try {
actionsUtil.prepareLocalRunEnvironment();
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt)))) {
util.prepareLocalRunEnvironment();
if (!(await util.sendStatusReport(await util.createStatusReportBase("finish", "starting", startedAt), true))) {
return;
}
const logger = logging_1.getActionsLogger();
config = await config_utils_1.getConfig(actionsUtil.getTemporaryDirectory(), logger);
const config = await config_utils_1.getConfig(util.getRequiredEnvParam("RUNNER_TEMP"), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
const apiDetails = {
auth: actionsUtil.getRequiredInput("token"),
url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
};
const outputDir = actionsUtil.getRequiredInput("output");
const queriesStats = await analyze_1.runAnalyze(outputDir, util.getMemoryFlag(actionsUtil.getOptionalInput("ram")), util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), util.getThreadsFlag(actionsUtil.getOptionalInput("threads"), logger), config, logger);
if (actionsUtil.getRequiredInput("upload") === "true") {
const uploadStats = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
stats = { ...queriesStats, ...uploadStats };
}
else {
logger.info("Not uploading results");
stats = { ...queriesStats };
}
stats = await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await util.getCommitOid(), util.getRef(), await util.getAnalysisKey(), util.getRequiredEnvParam("GITHUB_WORKFLOW"), util.getWorkflowRunID(), util.getRequiredInput("checkout_path"), util.getOptionalInput("matrix"), util.getRequiredInput("token"), util.getRequiredEnvParam("GITHUB_SERVER_URL"), util.getOptionalInput("upload") === "true", "actions", util.getRequiredInput("output"), util.getMemoryFlag(util.getOptionalInput("ram")), util.getAddSnippetsFlag(util.getOptionalInput("add-snippets")), util.getThreadsFlag(util.getOptionalInput("threads"), logger), config, logger);
}
catch (error) {
core.setFailed(error.message);
console.log(error);
if (error instanceof analyze_1.CodeQLAnalysisError) {
stats = { ...error.queriesStatusReport };
}
await sendStatusReport(startedAt, stats, error);
return;
}
finally {
if (core.isDebug() && config !== undefined) {
core.info("Debug mode is on. Printing CodeQL debug logs...");
for (const language of config.languages) {
const databaseDirectory = util.getCodeQLDatabasePath(config.tempDir, language);
const logsDirectory = path.join(databaseDirectory, "log");
const walkLogFiles = (dir) => {
const entries = fs.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile()) {
core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name}`);
process.stdout.write(fs.readFileSync(path.resolve(dir, entry.name)));
core.endGroup();
}
else if (entry.isDirectory()) {
walkLogFiles(path.resolve(dir, entry.name));
}
}
};
walkLogFiles(logsDirectory);
}
}
}
await sendStatusReport(startedAt, stats);
}
async function runWrapper() {
try {
await run();
}
catch (error) {
core.setFailed(`analyze action failed: ${error}`);
console.log(error);
}
}
void runWrapper();
run().catch((e) => {
core.setFailed(`analyze action failed: ${e}`);
console.log(e);
});
//# sourceMappingURL=analyze-action.js.map

View File

@@ -1 +1 @@
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAImB;AACnB,iDAAmD;AACnD,uCAA6C;AAC7C,yDAA2C;AAC3C,6CAA+B;AAU/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,KAAuC,EACvC,KAAa;;IAEb,MAAM,MAAM,GACV,OAAA,KAAK,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClE,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,QAAQ,EACR,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;KACjB,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,KAAK,GAAqC,SAAS,CAAC;IACxD,IAAI,MAAM,GAAuB,SAAS,CAAC;IAC3C,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,QAAQ,EACR,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;YACA,OAAO;SACR;QACD,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;QAClC,MAAM,GAAG,MAAM,wBAAS,CAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;QACtE,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC;SAC1D,CAAC;QACF,MAAM,SAAS,GAAG,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;QACzD,MAAM,YAAY,GAAG,MAAM,oBAAU,CACnC,SAAS,EACT,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,EACvD,IAAI,CAAC,kBAAkB,CAAC,WAAW,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EACrE,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC,EACpE,MAAM,EACN,MAAM,CACP,CAAC;QAEF,IAAI,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,KAAK,MAAM,EAAE;YACrD,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACpD,SAAS,EACT,MAAM,CAAC,aAAa,EACpB,UAAU,EACV,MAAM,CACP,CAAC;YACF,KAAK,GAAG,EAAE,GAAG,YAAY,EAAE,GAAG,WAAW,EAAE,CAAC;SAC7C;aAAM;YACL,MAAM,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;YACrC,KAAK,GAAG,EAAE,GAAG,YAAY,EAAE,CAAC;SAC7B;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAEnB,IAAI,KAAK,YAAY,6BAAmB,EAAE;YACxC,KAAK,GAAG,EAAE,GAAG,KAAK,CAAC,mBAAmB,EAAE,CAAC;SAC1C;QAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAChD,OAAO;KACR;YAAS;QACR,IAAI,IAAI,CAAC,OAAO,EAAE,IAAI,MAAM,KAAK,SAAS,EAAE;YAC1C,IAAI,CAAC,IAAI,CAAC,iDAAiD,CAAC,CAAC;YAC7D,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;gBACvC,MAAM,iBAAiB,GAAG,IAAI,CAAC,qBAAqB,CAClD,MAAM,CAAC,OAAO,EACd,QAAQ,CACT,CAAC;gBACF,MAAM,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,iBAAiB,EAAE,KAAK,CAAC,CAAC;gBAE1D,MAAM,YAAY,GAAG,CAAC,GAAW,EAAE,EAAE;oBACnC,MAAM,OAAO,GAAG,EAAE,CAAC,WAAW,CAAC,GAAG,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;oBAC7D,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE;wBAC3B,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;4BAClB,IAAI,CAAC,UAAU,CACb,uBAAuB,QAAQ,MAAM,KAAK,CAAC,IAAI,EAAE,CAClD,CAAC;4BACF,OAAO,CAAC,MAAM,CAAC,KAAK,CAClB,EAAE,CAAC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAC/C,CAAC;4BACF,IAAI,CAAC,QAAQ,EAAE,CAAC;yBACjB;6BAAM,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE;4BAC9B,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;yBAC7C;qBACF;gBACH,CAAC,CAAC;gBACF,YAAY,CAAC,aAAa,CAAC,CAAC;aAC7B;SACF;KACF;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;AAC3C,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,0BAA0B,KAAK,EAAE,CAAC,CAAC;QAClD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"analyze-action.js","sourceRoot":"","sources":["../src/analyze-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,uCAA6D;AAC7D,iDAA2C;AAC3C,uCAA6C;AAC7C,6CAAkD;AAClD,6CAA+B;AAM/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,KAAuC,EACvC,KAAa;;IAEb,MAAM,MAAM,GACV,OAAA,KAAK,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClE,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,QAAQ,EACR,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,KAAK,IAAI,EAAE,CAAC;KACjB,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,KAAK,GAAqC,SAAS,CAAC;IACxD,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IACE,CAAC,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAC3B,MAAM,IAAI,CAAC,sBAAsB,CAAC,QAAQ,EAAE,UAAU,EAAE,SAAS,CAAC,EAClE,IAAI,CACL,CAAC,EACF;YACA,OAAO;SACR;QACD,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;QAClC,MAAM,MAAM,GAAG,MAAM,wBAAS,CAC5B,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,EACvC,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,KAAK,GAAG,MAAM,oBAAU,CACtB,+BAAkB,CAAC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACjE,MAAM,IAAI,CAAC,YAAY,EAAE,EACzB,IAAI,CAAC,MAAM,EAAE,EACb,MAAM,IAAI,CAAC,cAAc,EAAE,EAC3B,IAAI,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAC3C,IAAI,CAAC,gBAAgB,EAAE,EACvB,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC,EACtC,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EAC/B,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC,EAC9B,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC,KAAK,MAAM,EAC1C,SAAS,EACT,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EAC/B,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,gBAAgB,CAAC,KAAK,CAAC,CAAC,EAChD,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EAC9D,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC,EAC7D,MAAM,EACN,MAAM,CACP,CAAC;KACH;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;QAChD,OAAO;KACR;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;AAC3C,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;IAChB,IAAI,CAAC,SAAS,CAAC,0BAA0B,CAAC,EAAE,CAAC,CAAC;IAC9C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

103
lib/analyze.js generated
View File

@@ -9,45 +9,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const languages_1 = require("./languages");
const sharedEnv = __importStar(require("./shared-environment"));
const upload_lib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
class CodeQLAnalysisError extends Error {
constructor(queriesStatusReport, message) {
super(message);
this.name = "CodeQLAnalysisError";
this.queriesStatusReport = queriesStatusReport;
}
}
exports.CodeQLAnalysisError = CodeQLAnalysisError;
async function setupPythonExtractor(logger) {
const codeqlPython = process.env["CODEQL_PYTHON"];
if (codeqlPython === undefined || codeqlPython.length === 0) {
// If CODEQL_PYTHON is not set, no dependencies were installed, so we don't need to do anything
return;
}
let output = "";
const options = {
listeners: {
stdout: (data) => {
output += data.toString();
},
},
};
await new toolrunner.ToolRunner(codeqlPython, [
"-c",
"import os; import pip; print(os.path.dirname(os.path.dirname(pip.__file__)))",
], options).exec();
logger.info(`Setting LGTM_INDEX_IMPORT_PATH=${output}`);
process.env["LGTM_INDEX_IMPORT_PATH"] = output;
output = "";
await new toolrunner.ToolRunner(codeqlPython, ["-c", "import sys; print(sys.version_info[0])"], options).exec();
logger.info(`Setting LGTM_PYTHON_SETUP_VERSION=${output}`);
process.env["LGTM_PYTHON_SETUP_VERSION"] = output;
}
async function createdDBForScannedLanguages(config, logger) {
// Insert the LGTM_INDEX_X env vars at this point so they are set when
// we extract any scanned languages.
@@ -56,75 +23,65 @@ async function createdDBForScannedLanguages(config, logger) {
for (const language of config.languages) {
if (languages_1.isScannedLanguage(language)) {
logger.startGroup(`Extracting ${language}`);
if (language === languages_1.Language.python) {
await setupPythonExtractor(logger);
}
await codeql.extractScannedLanguage(util.getCodeQLDatabasePath(config.tempDir, language), language);
logger.endGroup();
}
}
}
async function finalizeDatabaseCreation(config, threadsFlag, logger) {
async function finalizeDatabaseCreation(config, logger) {
await createdDBForScannedLanguages(config, logger);
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
logger.startGroup(`Finalizing ${language}`);
await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config.tempDir, language), threadsFlag);
await codeql.finalizeDatabase(util.getCodeQLDatabasePath(config.tempDir, language));
logger.endGroup();
}
}
// Runs queries and creates sarif files in the given folder
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
const statusReport = {};
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
for (const language of config.languages) {
logger.startGroup(`Analyzing ${language}`);
const queries = config.queries[language];
if (queries.builtin.length === 0 && queries.custom.length === 0) {
const queries = config.queries[language] || [];
if (queries.length === 0) {
throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
}
try {
for (const type of ["builtin", "custom"]) {
if (queries[type].length > 0) {
const startTime = new Date().getTime();
const databasePath = util.getCodeQLDatabasePath(config.tempDir, language);
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuitePath = `${databasePath}-queries-${type}.qls`;
const querySuiteContents = queries[type]
.map((q) => `- query: ${q}`)
.join("\n");
fs.writeFileSync(querySuitePath, querySuiteContents);
logger.debug(`Query suite file for ${language}...\n${querySuiteContents}`);
const sarifFile = path.join(sarifFolder, `${language}-${type}.sarif`);
const codeql = codeql_1.getCodeQL(config.codeQLCmd);
await codeql.databaseAnalyze(databasePath, sarifFile, querySuitePath, memoryFlag, addSnippetsFlag, threadsFlag);
logger.debug(`SARIF results for database ${language} created at "${sarifFile}"`);
logger.endGroup();
// Record the performance
const endTime = new Date().getTime();
statusReport[`analyze_${type}_queries_${language}_duration_ms`] =
endTime - startTime;
}
}
const databasePath = util.getCodeQLDatabasePath(config.tempDir, language);
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuite = `${databasePath}-queries.qls`;
const querySuiteContents = queries.map((q) => `- query: ${q}`).join("\n");
fs.writeFileSync(querySuite, querySuiteContents);
logger.debug(`Query suite file for ${language}...\n${querySuiteContents}`);
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
await codeql.databaseAnalyze(databasePath, sarifFile, querySuite, memoryFlag, addSnippetsFlag, threadsFlag);
logger.debug(`SARIF results for database ${language} created at "${sarifFile}"`);
logger.endGroup();
}
catch (e) {
logger.info(e);
statusReport.analyze_failure_language = language;
throw new CodeQLAnalysisError(statusReport, `Error running analysis for ${language}: ${e}`);
// For now the fields about query performance are not populated
return {
analyze_failure_language: language,
};
}
}
return statusReport;
return {};
}
exports.runQueries = runQueries;
async function runAnalyze(outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
async function runAnalyze(repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, doUpload, mode, outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger) {
// Delete the tracer config env var to avoid tracing ourselves
delete process.env[sharedEnv.ODASA_TRACER_CONFIGURATION];
fs.mkdirSync(outputDir, { recursive: true });
logger.info("Finalizing database creation");
await finalizeDatabaseCreation(config, threadsFlag, logger);
await finalizeDatabaseCreation(config, logger);
logger.info("Analyzing database");
const queriesStats = await runQueries(outputDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logger);
return { ...queriesStats };
if (!doUpload) {
logger.info("Not uploading results");
return { ...queriesStats };
}
const uploadStats = await upload_lib.upload(outputDir, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger);
return { ...queriesStats, ...uploadStats };
}
exports.runAnalyze = runAnalyze;
//# sourceMappingURL=analyze.js.map

View File

@@ -1 +1 @@
{"version":3,"file":"analyze.js","sourceRoot":"","sources":["../src/analyze.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAE3D,gEAAkD;AAClD,qCAAqC;AAErC,2CAA0D;AAE1D,gEAAkD;AAClD,6CAA+B;AAE/B,MAAa,mBAAoB,SAAQ,KAAK;IAG5C,YAAY,mBAAwC,EAAE,OAAe;QACnE,KAAK,CAAC,OAAO,CAAC,CAAC;QAEf,IAAI,CAAC,IAAI,GAAG,qBAAqB,CAAC;QAClC,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;IACjD,CAAC;CACF;AATD,kDASC;AA+BD,KAAK,UAAU,oBAAoB,CAAC,MAAc;IAChD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAC;IAClD,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;QAC3D,+FAA+F;QAC/F,OAAO;KACR;IAED,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,OAAO,GAAG;QACd,SAAS,EAAE;YACT,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;gBACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC5B,CAAC;SACF;KACF,CAAC;IAEF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,YAAY,EACZ;QACE,IAAI;QACJ,8EAA8E;KAC/E,EACD,OAAO,CACR,CAAC,IAAI,EAAE,CAAC;IACT,MAAM,CAAC,IAAI,CAAC,kCAAkC,MAAM,EAAE,CAAC,CAAC;IACxD,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,GAAG,MAAM,CAAC;IAE/C,MAAM,GAAG,EAAE,CAAC;IACZ,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,YAAY,EACZ,CAAC,IAAI,EAAE,wCAAwC,CAAC,EAChD,OAAO,CACR,CAAC,IAAI,EAAE,CAAC;IACT,MAAM,CAAC,IAAI,CAAC,qCAAqC,MAAM,EAAE,CAAC,CAAC;IAC3D,OAAO,CAAC,GAAG,CAAC,2BAA2B,CAAC,GAAG,MAAM,CAAC;AACpD,CAAC;AAED,KAAK,UAAU,4BAA4B,CACzC,MAA0B,EAC1B,MAAc;IAEd,sEAAsE;IACtE,oCAAoC;IACpC,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IAErD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,6BAAiB,CAAC,QAAQ,CAAC,EAAE;YAC/B,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;YAE5C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,MAAM,EAAE;gBAChC,MAAM,oBAAoB,CAAC,MAAM,CAAC,CAAC;aACpC;YAED,MAAM,MAAM,CAAC,sBAAsB,CACjC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,CACT,CAAC;YACF,MAAM,CAAC,QAAQ,EAAE,CAAC;SACnB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CACrC,MAA0B,EAC1B,WAAmB,EACnB,MAAc;IAEd,MAAM,4BAA4B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAEnD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,gBAAgB,CAC3B,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,WAAW,CACZ,CAAC;QACF,MAAM,CAAC,QAAQ,EAAE,CAAC;KACnB;AACH,CAAC;AAED,2DAA2D;AACpD,KAAK,UAAU,UAAU,CAC9B,WAAmB,EACnB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,MAAM,YAAY,GAAwB,EAAE,CAAC;IAE7C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,aAAa,QAAQ,EAAE,CAAC,CAAC;QAE3C,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QACzC,IAAI,OAAO,CAAC,OAAO,CAAC,MAAM,KAAK,CAAC,IAAI,OAAO,CAAC,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE;YAC/D,MAAM,IAAI,KAAK,CACb,qBAAqB,QAAQ,gDAAgD,CAC9E,CAAC;SACH;QAED,IAAI;YACF,KAAK,MAAM,IAAI,IAAI,CAAC,SAAS,EAAE,QAAQ,CAAC,EAAE;gBACxC,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;oBAC5B,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC;oBAEvC,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAC7C,MAAM,CAAC,OAAO,EACd,QAAQ,CACT,CAAC;oBACF,uEAAuE;oBACvE,2EAA2E;oBAC3E,MAAM,cAAc,GAAG,GAAG,YAAY,YAAY,IAAI,MAAM,CAAC;oBAC7D,MAAM,kBAAkB,GAAG,OAAO,CAAC,IAAI,CAAC;yBACrC,GAAG,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,YAAY,CAAC,EAAE,CAAC;yBACnC,IAAI,CAAC,IAAI,CAAC,CAAC;oBACd,EAAE,CAAC,aAAa,CAAC,cAAc,EAAE,kBAAkB,CAAC,CAAC;oBACrD,MAAM,CAAC,KAAK,CACV,wBAAwB,QAAQ,QAAQ,kBAAkB,EAAE,CAC7D,CAAC;oBAEF,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,GAAG,QAAQ,IAAI,IAAI,QAAQ,CAAC,CAAC;oBAEtE,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;oBAC3C,MAAM,MAAM,CAAC,eAAe,CAC1B,YAAY,EACZ,SAAS,EACT,cAAc,EACd,UAAU,EACV,eAAe,EACf,WAAW,CACZ,CAAC;oBAEF,MAAM,CAAC,KAAK,CACV,8BAA8B,QAAQ,gBAAgB,SAAS,GAAG,CACnE,CAAC;oBACF,MAAM,CAAC,QAAQ,EAAE,CAAC;
oBAElB,yBAAyB;oBACzB,MAAM,OAAO,GAAG,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC;oBACrC,YAAY,CAAC,WAAW,IAAI,YAAY,QAAQ,cAAc,CAAC;wBAC7D,OAAO,GAAG,SAAS,CAAC;iBACvB;aACF;SACF;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;YACf,YAAY,CAAC,wBAAwB,GAAG,QAAQ,CAAC;YACjD,MAAM,IAAI,mBAAmB,CAC3B,YAAY,EACZ,8BAA8B,QAAQ,KAAK,CAAC,EAAE,CAC/C,CAAC;SACH;KACF;IAED,OAAO,YAAY,CAAC;AACtB,CAAC;AA1ED,gCA0EC;AAEM,KAAK,UAAU,UAAU,CAC9B,SAAiB,EACjB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,8DAA8D;IAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;IAEzD,EAAE,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE7C,MAAM,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;IAC5C,MAAM,wBAAwB,CAAC,MAAM,EAAE,WAAW,EAAE,MAAM,CAAC,CAAC;IAE5D,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IAClC,MAAM,YAAY,GAAG,MAAM,UAAU,CACnC,SAAS,EACT,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,MAAM,CACP,CAAC;IAEF,OAAO,EAAE,GAAG,YAAY,EAAE,CAAC;AAC7B,CAAC;AA3BD,gCA2BC"}
{"version":3,"file":"analyze.js","sourceRoot":"","sources":["../src/analyze.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,gEAAkD;AAClD,qCAAqC;AAErC,2CAAgD;AAGhD,gEAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAmC/B,KAAK,UAAU,4BAA4B,CACzC,MAA0B,EAC1B,MAAc;IAEd,sEAAsE;IACtE,oCAAoC;IACpC,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IAErD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,6BAAiB,CAAC,QAAQ,CAAC,EAAE;YAC/B,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;YAC5C,MAAM,MAAM,CAAC,sBAAsB,CACjC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,CACT,CAAC;YACF,MAAM,CAAC,QAAQ,EAAE,CAAC;SACnB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CACrC,MAA0B,EAC1B,MAAc;IAEd,MAAM,4BAA4B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAEnD,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,cAAc,QAAQ,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,gBAAgB,CAC3B,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,CACrD,CAAC;QACF,MAAM,CAAC,QAAQ,EAAE,CAAC;KACnB;AACH,CAAC;AAED,2DAA2D;AAC3D,KAAK,UAAU,UAAU,CACvB,WAAmB,EACnB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,MAAM,MAAM,GAAG,kBAAS,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC3C,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,MAAM,CAAC,UAAU,CAAC,aAAa,QAAQ,EAAE,CAAC,CAAC;QAE3C,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;QAC/C,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,qBAAqB,QAAQ,gDAAgD,CAC9E,CAAC;SACH;QAED,IAAI;YACF,MAAM,YAAY,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;YAC1E,uEAAuE;YACvE,2EAA2E;YAC3E,MAAM,UAAU,GAAG,GAAG,YAAY,cAAc,CAAC;YACjD,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,YAAY,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YAC1E,EAAE,CAAC,aAAa,CAAC,UAAU,EAAE,kBAAkB,CAAC,CAAC;YACjD,MAAM,CAAC,KAAK,CACV,wBAAwB,QAAQ,QAAQ,kBAAkB,EAAE,CAC7D,CAAC;YAEF,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,GAAG,QAAQ,QAAQ,CAAC,CAAC;YAE9D,MAAM,MAAM,CAAC,eAAe,CAC1B,YAAY,EACZ,SAAS,EACT,UAAU,EACV,UAAU,EACV,eAAe,EACf,WAAW,CACZ,CAAC;YAEF,MAAM,CAAC,KAAK,CACV,8BAA8B,QAAQ,gBAAgB,SAAS,GAAG,CACnE,CAAC;YACF,MAAM,CAAC,QAAQ,EAAE,CAAC;SACnB;QAAC,OAAO,CAAC,EAAE;YACV,+DAA+D;YAC/D,OAAO;gBACL,wBAAwB,EAAE,QAAQ;aACnC,CAAC;SACH;KACF;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAEM,KAAK,UAAU,UAAU,CAC9B,aAA4B,EAC5B,SAAiB,EACjB,GAAW,EACX,WAA+B,EAC/B,YAAgC,EAChC,aAAiC,EACjC,YAAoB,EACpB,WAA+B,EAC/B,UAAkB,EAClB,SAAiB,EACjB,QAAiB,EACjB,IAAe,EACf,SAAiB,EACjB,UAAkB,EAClB,eAAuB,EACvB,WAAmB,EACnB,MAA0B,EAC1B,MAAc;IAEd,8DAA8D;IAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;IAEzD,EAAE,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE7C,MAAM,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;IAC5C,MAAM,wBAAwB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAE/C,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IAClC,MAAM,YAAY,GAAG,MAAM,UAAU,CACnC,SAAS,EACT,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,MAAM,CACP,CAAC;IAEF,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,CAAC,IAAI,CAAC,uBAAuB,CAAC,CAAC;QACrC,OAAO,EAAE,GAAG,YAAY,EAAE,CAAC;KAC5B;IAED,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CACzC,SAAS,EACT,aAAa,EACb,SAAS,EACT,GAAG,EACH,WAAW,EACX,YAAY,EACZ,aAAa,EACb,YAAY,EACZ,WAAW,EACX,UAAU,EACV,SAAS,EACT,IAAI,EACJ,MAAM,CACP,CAAC;IAEF,OAAO,EAAE,GAAG,YAAY,EAAE,GAAG,WAAW,EAAE,CAAC;AAC7C,CAAC;AA5DD,gCA4DC"}

lib/analyze.test.js (generated, 66 changed lines)

@@ -1,66 +0,0 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const ava_1 = __importDefault(require("ava"));
const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
// Checks that the duration fields are populated for the correct language
// and correct case of builtin or custom.
ava_1.default("status report fields", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
codeql_1.setCodeQL({
databaseAnalyze: async () => undefined,
});
const memoryFlag = "";
const addSnippetsFlag = "";
const threadsFlag = "";
for (const language of Object.values(languages_1.Language)) {
const config = {
languages: [language],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
gitHubVersion: {
type: util.GitHubVariant.DOTCOM,
},
};
fs.mkdirSync(util.getCodeQLDatabasePath(config.tempDir, language), {
recursive: true,
});
config.queries[language] = {
builtin: ["foo.ql"],
custom: [],
};
const builtinStatusReport = await analyze_1.runQueries(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logging_1.getRunnerLogger(true));
t.deepEqual(Object.keys(builtinStatusReport).length, 1);
t.true(`analyze_builtin_queries_${language}_duration_ms` in builtinStatusReport);
config.queries[language] = {
builtin: [],
custom: ["foo.ql"],
};
const customStatusReport = await analyze_1.runQueries(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, config, logging_1.getRunnerLogger(true));
t.deepEqual(Object.keys(customStatusReport).length, 1);
t.true(`analyze_custom_queries_${language}_duration_ms` in customStatusReport);
}
});
});
//# sourceMappingURL=analyze.test.js.map
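As a side note, the status-report keys the test above checks follow a simple pattern: one duration field per language, named by whether the queries were builtin or custom. A minimal TypeScript sketch of that key scheme, assuming only the field-name format visible in the assertions; the report-building helpers are illustrative, not the action's real implementation:

// Illustrative only: the duration-field names come from the assertions above;
// the helpers below are not the action's real implementation.
type QueriesStatusReport = Record<string, number>;

function durationKey(kind: "builtin" | "custom", language: string): string {
  return `analyze_${kind}_queries_${language}_duration_ms`;
}

function recordDuration(
  report: QueriesStatusReport,
  kind: "builtin" | "custom",
  language: string,
  startedAtMs: number
): void {
  report[durationKey(kind, language)] = Date.now() - startedAtMs;
}

// Example: a builtin-queries run for javascript that took roughly 1.2 seconds.
const report: QueriesStatusReport = {};
recordDuration(report, "builtin", "javascript", Date.now() - 1200);
// report now holds { analyze_builtin_queries_javascript_duration_ms: ~1200 }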


@@ -1 +0,0 @@
{"version":3,"file":"analyze.test.js","sourceRoot":"","sources":["../src/analyze.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uCAAyB;AAEzB,8CAAuB;AAEvB,uCAAuC;AACvC,qCAAqC;AAErC,2CAAuC;AACvC,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,yEAAyE;AACzE,yCAAyC;AACzC,aAAI,CAAC,sBAAsB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,kBAAS,CAAC;YACR,eAAe,EAAE,KAAK,IAAI,EAAE,CAAC,SAAS;SACvC,CAAC,CAAC;QAEH,MAAM,UAAU,GAAG,EAAE,CAAC;QACtB,MAAM,eAAe,GAAG,EAAE,CAAC;QAC3B,MAAM,WAAW,GAAG,EAAE,CAAC;QAEvB,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,MAAM,CAAC,oBAAQ,CAAC,EAAE;YAC9C,MAAM,MAAM,GAAW;gBACrB,SAAS,EAAE,CAAC,QAAQ,CAAC;gBACrB,OAAO,EAAE,EAAE;gBACX,WAAW,EAAE,EAAE;gBACf,KAAK,EAAE,EAAE;gBACT,iBAAiB,EAAE,EAAE;gBACrB,OAAO,EAAE,MAAM;gBACf,YAAY,EAAE,MAAM;gBACpB,SAAS,EAAE,EAAE;gBACb,aAAa,EAAE;oBACb,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;iBACV;aACxB,CAAC;YACF,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EAAE;gBACjE,SAAS,EAAE,IAAI;aAChB,CAAC,CAAC;YAEH,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG;gBACzB,OAAO,EAAE,CAAC,QAAQ,CAAC;gBACnB,MAAM,EAAE,EAAE;aACX,CAAC;YACF,MAAM,mBAAmB,GAAG,MAAM,oBAAU,CAC1C,MAAM,EACN,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;YACF,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;YACxD,CAAC,CAAC,IAAI,CACJ,2BAA2B,QAAQ,cAAc,IAAI,mBAAmB,CACzE,CAAC;YAEF,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG;gBACzB,OAAO,EAAE,EAAE;gBACX,MAAM,EAAE,CAAC,QAAQ,CAAC;aACnB,CAAC;YACF,MAAM,kBAAkB,GAAG,MAAM,oBAAU,CACzC,MAAM,EACN,UAAU,EACV,eAAe,EACf,WAAW,EACX,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;YACF,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;YACvD,CAAC,CAAC,IAAI,CACJ,0BAA0B,QAAQ,cAAc,IAAI,kBAAkB,CACvE,CAAC;SACH;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

lib/api-client.js (generated, 30 changed lines)

@@ -10,29 +10,25 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const path = __importStar(require("path"));
const githubUtils = __importStar(require("@actions/github/lib/utils"));
const core = __importStar(require("@actions/core"));
const github = __importStar(require("@actions/github"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const actions_util_1 = require("./actions-util");
const path = __importStar(require("path"));
const util_1 = require("./util");
var DisallowedAPIVersionReason;
(function (DisallowedAPIVersionReason) {
DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_OLD"] = 0] = "ACTION_TOO_OLD";
DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_NEW"] = 1] = "ACTION_TOO_NEW";
})(DisallowedAPIVersionReason = exports.DisallowedAPIVersionReason || (exports.DisallowedAPIVersionReason = {}));
exports.getApiClient = function (apiDetails, allowLocalRun = false) {
exports.getApiClient = function (githubAuth, githubUrl, allowLocalRun = false) {
if (util_1.isLocalRun() && !allowLocalRun) {
throw new Error("Invalid API call in local run");
}
return new githubUtils.GitHub(githubUtils.getOctokitOptions(apiDetails.auth, {
baseUrl: getApiUrl(apiDetails.url),
return new github.GitHub({
auth: githubAuth,
baseUrl: getApiUrl(githubUrl),
userAgent: "CodeQL Action",
log: console_log_level_1.default({ level: "debug" }),
}));
});
};
function getApiUrl(githubUrl) {
const url = new URL(githubUrl);
// If we detect this is trying to connect to github.com
// If we detect this is trying to be to github.com
// then return with a fixed canonical URL.
if (url.hostname === "github.com" || url.hostname === "api.github.com") {
return "https://api.github.com";
@@ -42,14 +38,10 @@ function getApiUrl(githubUrl) {
return url.toString();
}
// Temporary function to aid in the transition to running on and off of github actions.
// Once all code has been converted this function should be removed or made canonical
// Once all code has been coverted this function should be removed or made canonical
// and called only from the action entrypoints.
function getActionsApiClient(allowLocalRun = false) {
const apiDetails = {
auth: actions_util_1.getRequiredInput("token"),
url: actions_util_1.getRequiredEnvParam("GITHUB_SERVER_URL"),
};
return exports.getApiClient(apiDetails, allowLocalRun);
return exports.getApiClient(core.getInput("token", { required: true }), util_1.getRequiredEnvParam("GITHUB_SERVER_URL"), allowLocalRun);
}
exports.getActionsApiClient = getActionsApiClient;
//# sourceMappingURL=api-client.js.map
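For reference, a minimal sketch of the URL normalization getApiUrl performs above. The github.com/api.github.com branch is taken from the diff; the Enterprise branch assumes the elided lines append an /api/v3 prefix, as the surrounding comments suggest, so treat that suffix as an assumption rather than confirmed behaviour:

// Sketch only: mirrors getApiUrl from the diff above. The /api/v3 suffix for
// GitHub Enterprise Server is an assumption based on the elided lines, not a
// confirmed detail of this module.
import * as path from "path";

function getApiUrlSketch(githubUrl: string): string {
  const url = new URL(githubUrl);
  // github.com and api.github.com both resolve to the canonical dotcom API URL.
  if (url.hostname === "github.com" || url.hostname === "api.github.com") {
    return "https://api.github.com";
  }
  // Assumed: a GitHub Enterprise Server instance serves its v3 API under /api/v3.
  // posix join keeps the sketch platform-independent.
  url.pathname = path.posix.join(url.pathname, "api", "v3");
  return url.toString();
}

console.log(getApiUrlSketch("https://github.com"));        // https://api.github.com
console.log(getApiUrlSketch("https://ghes.example.com/")); // https://ghes.example.com/api/v3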


@@ -1 +1 @@
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2CAA6B;AAE7B,uEAAyD;AACzD,0EAAgD;AAEhD,iDAAuE;AACvE,iCAAoC;AAEpC,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAeY,QAAA,YAAY,GAAG,UAC1B,UAA4B,EAC5B,aAAa,GAAG,KAAK;IAErB,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IACD,OAAO,IAAI,WAAW,CAAC,MAAM,CAC3B,WAAW,CAAC,iBAAiB,CAAC,UAAU,CAAC,IAAI,EAAE;QAC7C,OAAO,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC;QAClC,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC,CAAC;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,uDAAuD;IACvD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,uFAAuF;AACvF,qFAAqF;AACrF,+CAA+C;AAC/C,SAAgB,mBAAmB,CAAC,aAAa,GAAG,KAAK;IACvD,MAAM,UAAU,GAAG;QACjB,IAAI,EAAE,+BAAgB,CAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,kCAAmB,CAAC,mBAAmB,CAAC;KAC9C,CAAC;IAEF,OAAO,oBAAY,CAAC,UAAU,EAAE,aAAa,CAAC,CAAC;AACjD,CAAC;AAPD,kDAOC"}
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,oDAAsC;AACtC,wDAA0C;AAC1C,0EAAgD;AAChD,2CAA6B;AAE7B,iCAAyD;AAE5C,QAAA,YAAY,GAAG,UAC1B,UAAkB,EAClB,SAAiB,EACjB,aAAa,GAAG,KAAK;IAErB,IAAI,iBAAU,EAAE,IAAI,CAAC,aAAa,EAAE;QAClC,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;KAClD;IACD,OAAO,IAAI,MAAM,CAAC,MAAM,CAAC;QACvB,IAAI,EAAE,UAAU;QAChB,OAAO,EAAE,SAAS,CAAC,SAAS,CAAC;QAC7B,SAAS,EAAE,eAAe;QAC1B,GAAG,EAAE,2BAAe,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CAAC;AACL,CAAC,CAAC;AAEF,SAAS,SAAS,CAAC,SAAiB;IAClC,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC;IAE/B,kDAAkD;IAClD,0CAA0C;IAC1C,IAAI,GAAG,CAAC,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,QAAQ,KAAK,gBAAgB,EAAE;QACtE,OAAO,wBAAwB,CAAC;KACjC;IAED,6BAA6B;IAC7B,GAAG,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;AACxB,CAAC;AAED,uFAAuF;AACvF,oFAAoF;AACpF,+CAA+C;AAC/C,SAAgB,mBAAmB,CAAC,aAAa,GAAG,KAAK;IACvD,OAAO,oBAAY,CACjB,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,EAC1C,0BAAmB,CAAC,mBAAmB,CAAC,EACxC,aAAa,CACd,CAAC;AACJ,CAAC;AAND,kDAMC"}


@@ -1 +0,0 @@
{ "maximumVersion": "3.1", "minimumVersion": "2.22" }


@@ -8,33 +8,33 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const autobuild_1 = require("./autobuild");
const config_utils = __importStar(require("./config-utils"));
const logging_1 = require("./logging");
const util = __importStar(require("./util"));
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
var _a, _b;
const status = failingLanguage !== undefined || cause !== undefined
? "failure"
: "success";
const statusReportBase = await actionsUtil.createStatusReportBase("autobuild", status, startedAt, (_a = cause) === null || _a === void 0 ? void 0 : _a.message, (_b = cause) === null || _b === void 0 ? void 0 : _b.stack);
const statusReportBase = await util.createStatusReportBase("autobuild", status, startedAt, (_a = cause) === null || _a === void 0 ? void 0 : _a.message, (_b = cause) === null || _b === void 0 ? void 0 : _b.stack);
const statusReport = {
...statusReportBase,
autobuild_languages: allLanguages.join(","),
autobuild_failure: failingLanguage,
};
await actionsUtil.sendStatusReport(statusReport);
await util.sendStatusReport(statusReport);
}
async function run() {
const logger = logging_1.getActionsLogger();
const startedAt = new Date();
let language = undefined;
try {
actionsUtil.prepareLocalRunEnvironment();
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("autobuild", "starting", startedAt)))) {
util.prepareLocalRunEnvironment();
if (!(await util.sendStatusReport(await util.createStatusReportBase("autobuild", "starting", startedAt), true))) {
return;
}
const config = await config_utils.getConfig(actionsUtil.getTemporaryDirectory(), logger);
const config = await config_utils.getConfig(util.getRequiredEnvParam("RUNNER_TEMP"), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
}
@@ -51,14 +51,8 @@ async function run() {
}
await sendCompletedStatusReport(startedAt, language ? [language] : []);
}
async function runWrapper() {
try {
await run();
}
catch (error) {
core.setFailed(`autobuild action failed. ${error}`);
console.log(error);
}
}
void runWrapper();
run().catch((e) => {
core.setFailed(`autobuild action failed. ${e}`);
console.log(e);
});
//# sourceMappingURL=autobuild-action.js.map


@@ -1 +1 @@
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAS7C,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GACV,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClD,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QACzC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,WAAW,EACX,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,sCAA0B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,wBAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIAAmI,KAAK,CAAC,OAAO,EAAE,CACnJ,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,CACN,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,2CAAuE;AACvE,6DAA+C;AAE/C,uCAA6C;AAC7C,6CAA+B;AAS/B,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GACV,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS;QAClD,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,SAAS,CAAC;IAChB,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,GAAyB,SAAS,CAAC;IAC/C,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IACE,CAAC,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAC3B,MAAM,IAAI,CAAC,sBAAsB,CAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,EACrE,IAAI,CACL,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC,SAAS,CACzC,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,EACvC,MAAM,CACP,CAAC;QACF,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QACD,QAAQ,GAAG,sCAA0B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtD,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,MAAM,wBAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;SAC9C;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIAAmI,KAAK,CAAC,OAAO,EAAE,CACnJ,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,EAC1B,QAAQ,EACR,KAAK,CACN,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AACzE,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;IAChB,IAAI,CAAC,SAAS,CAAC,6BAA6B,CAAC,EAAE,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

lib/codeql.js (generated, 243 changed lines)

@@ -10,18 +10,15 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const stream = __importStar(require("stream"));
const globalutil = __importStar(require("util"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner"));
const http = __importStar(require("@actions/http-client"));
const toolcache = __importStar(require("@actions/tool-cache"));
const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
const query_string_1 = __importDefault(require("query-string"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
const uuid_1 = require("uuid");
const actions_util_1 = require("./actions-util");
const stream = __importStar(require("stream"));
const globalutil = __importStar(require("util"));
const v4_1 = __importDefault(require("uuid/v4"));
const api = __importStar(require("./api-client"));
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
const error_matcher_1 = require("./error-matcher");
@@ -33,87 +30,40 @@ const util = __importStar(require("./util"));
*/
let cachedCodeQL = undefined;
const CODEQL_BUNDLE_VERSION = defaults.bundleVersion;
const CODEQL_BUNDLE_NAME = "codeql-bundle.tar.gz";
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
function getCodeQLBundleName() {
let platform;
if (process.platform === "win32") {
platform = "win64";
}
else if (process.platform === "linux") {
platform = "linux64";
}
else if (process.platform === "darwin") {
platform = "osx64";
}
else {
return "codeql-bundle.tar.gz";
}
return `codeql-bundle-${platform}.tar.gz`;
}
function getCodeQLActionRepository(mode, logger) {
function getCodeQLActionRepository(mode) {
if (mode !== "actions") {
return CODEQL_DEFAULT_ACTION_REPOSITORY;
}
else {
return getActionsCodeQLActionRepository(logger);
}
}
function getActionsCodeQLActionRepository(logger) {
if (process.env["GITHUB_ACTION_REPOSITORY"] !== undefined) {
return process.env["GITHUB_ACTION_REPOSITORY"];
}
// The Actions Runner used with GitHub Enterprise Server 2.22 did not set the GITHUB_ACTION_REPOSITORY variable.
// This fallback logic can be removed after the end-of-support for 2.22 on 2021-09-23.
if (actions_util_1.isRunningLocalAction()) {
// This handles the case where the Action does not come from an Action repository,
// e.g. our integration tests which use the Action code from the current checkout.
logger.info("The CodeQL Action is checked out locally. Using the default CodeQL Action repository.");
// Actions do not know their own repository name,
// so we currently use this hack to find the name based on where our files are.
// This can be removed once the change to the runner in https://github.com/actions/runner/pull/585 is deployed.
const runnerTemp = util.getRequiredEnvParam("RUNNER_TEMP");
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
const relativeScriptPath = path.relative(actionsDirectory, __filename);
// This handles the case where the Action does not come from an Action repository,
// e.g. our integration tests which use the Action code from the current checkout.
if (relativeScriptPath.startsWith("..") ||
path.isAbsolute(relativeScriptPath)) {
return CODEQL_DEFAULT_ACTION_REPOSITORY;
}
logger.info("GITHUB_ACTION_REPOSITORY environment variable was not set. Falling back to legacy method of finding the GitHub Action.");
const relativeScriptPathParts = actions_util_1.getRelativeScriptPath().split(path.sep);
const relativeScriptPathParts = relativeScriptPath.split(path.sep);
return `${relativeScriptPathParts[0]}/${relativeScriptPathParts[1]}`;
}
async function getCodeQLBundleDownloadURL(apiDetails, mode, variant, logger) {
const codeQLActionRepository = getCodeQLActionRepository(mode, logger);
async function getCodeQLBundleDownloadURL(githubAuth, githubUrl, mode, logger) {
const codeQLActionRepository = getCodeQLActionRepository(mode);
const potentialDownloadSources = [
// This GitHub instance, and this Action.
[apiDetails.url, codeQLActionRepository],
[githubUrl, codeQLActionRepository],
// This GitHub instance, and the canonical Action.
[apiDetails.url, CODEQL_DEFAULT_ACTION_REPOSITORY],
[githubUrl, CODEQL_DEFAULT_ACTION_REPOSITORY],
// GitHub.com, and the canonical Action.
[util.GITHUB_DOTCOM_URL, CODEQL_DEFAULT_ACTION_REPOSITORY],
];
// We now filter out any duplicates.
// Duplicates will happen either because the GitHub instance is GitHub.com, or because the Action is not a fork.
const uniqueDownloadSources = potentialDownloadSources.filter((source, index, self) => {
return !self.slice(0, index).some((other) => fast_deep_equal_1.default(source, other));
});
const codeQLBundleName = getCodeQLBundleName();
if (variant === util.GitHubVariant.GHAE) {
try {
const release = await api
.getApiClient(apiDetails)
.request("GET /enterprise/code-scanning/codeql-bundle/find/{tag}", {
tag: CODEQL_BUNDLE_VERSION,
});
const assetID = release.data.assets[codeQLBundleName];
if (assetID !== undefined) {
const download = await api
.getApiClient(apiDetails)
.request("GET /enterprise/code-scanning/codeql-bundle/download/{asset_id}", { asset_id: assetID });
const downloadURL = download.data.url;
logger.info(`Found CodeQL bundle at GitHub AE endpoint with URL ${downloadURL}.`);
return downloadURL;
}
else {
logger.info(`Attempted to fetch bundle from GitHub AE endpoint but the bundle ${codeQLBundleName} was not found in the assets ${JSON.stringify(release.data.assets)}.`);
}
}
catch (e) {
logger.info(`Attempted to fetch bundle from GitHub AE endpoint but got error ${e}.`);
}
}
const uniqueDownloadSources = potentialDownloadSources.filter((url, index, self) => index === self.indexOf(url));
for (const downloadSource of uniqueDownloadSources) {
const [apiURL, repository] = downloadSource;
// If we've reached the final case, short-circuit the API check since we know the bundle exists and is public.
@@ -123,13 +73,15 @@ async function getCodeQLBundleDownloadURL(apiDetails, mode, variant, logger) {
}
const [repositoryOwner, repositoryName] = repository.split("/");
try {
const release = await api.getApiClient(apiDetails).repos.getReleaseByTag({
const release = await api
.getApiClient(githubAuth, githubUrl)
.repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: CODEQL_BUNDLE_VERSION,
});
for (const asset of release.data.assets) {
if (asset.name === codeQLBundleName) {
if (asset.name === CODEQL_BUNDLE_NAME) {
logger.info(`Found CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} with URL ${asset.url}.`);
return asset.url;
}
@@ -139,13 +91,13 @@ async function getCodeQLBundleDownloadURL(apiDetails, mode, variant, logger) {
logger.info(`Looked for CodeQL bundle in ${downloadSource[1]} on ${downloadSource[0]} but got error ${e}.`);
}
}
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${codeQLBundleName}`;
return `https://github.com/${CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${CODEQL_BUNDLE_VERSION}/${CODEQL_BUNDLE_NAME}`;
}
// We have to download CodeQL manually because the toolcache doesn't support Accept headers.
// This can be removed once https://github.com/actions/toolkit/pull/530 is merged and released.
async function toolcacheDownloadTool(url, headers, tempDir, logger) {
const client = new http.HttpClient("CodeQL Action");
const dest = path.join(tempDir, uuid_1.v4());
const dest = path.join(tempDir, v4_1.default());
const response = await client.get(url, headers);
if (response.message.statusCode !== 200) {
logger.info(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
@@ -156,49 +108,29 @@ async function toolcacheDownloadTool(url, headers, tempDir, logger) {
await pipeline(response.message, fs.createWriteStream(dest));
return dest;
}
async function setupCodeQL(codeqlURL, apiDetails, tempDir, mode, variant, logger) {
async function setupCodeQL(codeqlURL, githubAuth, githubUrl, tempDir, toolsDir, mode, logger) {
// Setting these two env vars makes the toolcache code safe to use outside,
// of actions but this is obviously not a great thing we're doing and it would
// be better to write our own implementation to use outside of actions.
process.env["RUNNER_TEMP"] = tempDir;
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
try {
// We use the special value of 'latest' to prioritize the version in the
// defaults over any pinned cached version.
const forceLatest = codeqlURL === "latest";
if (forceLatest) {
codeqlURL = undefined;
}
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`);
const codeqlURLSemVer = convertToSemVer(codeqlURLVersion, logger);
// If we find the specified version, we always use that.
let codeqlFolder = toolcache.find("CodeQL", codeqlURLSemVer);
// If we don't find the requested version, in some cases we may allow a
// different version to save download time if the version hasn't been
// specified explicitly (in which case we always honor it).
if (!codeqlFolder && !codeqlURL && !forceLatest) {
const codeqlVersions = toolcache.findAllVersions("CodeQL");
if (codeqlVersions.length === 1) {
const tmpCodeqlFolder = toolcache.find("CodeQL", codeqlVersions[0]);
if (fs.existsSync(path.join(tmpCodeqlFolder, "pinned-version"))) {
logger.debug(`CodeQL in cache overriding the default ${CODEQL_BUNDLE_VERSION}`);
codeqlFolder = tmpCodeqlFolder;
}
}
}
const codeqlURLVersion = getCodeQLURLVersion(codeqlURL || `/${CODEQL_BUNDLE_VERSION}/`, logger);
let codeqlFolder = toolcache.find("CodeQL", codeqlURLVersion);
if (codeqlFolder) {
logger.debug(`CodeQL found in cache ${codeqlFolder}`);
}
else {
if (!codeqlURL) {
codeqlURL = await getCodeQLBundleDownloadURL(apiDetails, mode, variant, logger);
codeqlURL = await getCodeQLBundleDownloadURL(githubAuth, githubUrl, mode, logger);
}
const parsedCodeQLURL = new URL(codeqlURL);
const parsedQueryString = query_string_1.default.parse(parsedCodeQLURL.search);
const headers = { accept: "application/octet-stream" };
// We only want to provide an authorization header if we are downloading
// from the same GitHub instance the Action is running on.
// This avoids leaking Enterprise tokens to dotcom.
// We also don't want to send an authorization header if there's already a token provided in the URL.
if (codeqlURL.startsWith(`${apiDetails.url}/`) &&
parsedQueryString["token"] === undefined) {
if (codeqlURL.startsWith(`${githubUrl}/`)) {
logger.debug("Downloading CodeQL bundle with token.");
headers.authorization = `token ${apiDetails.auth}`;
headers.authorization = `token ${githubAuth}`;
}
else {
logger.debug("Downloading CodeQL bundle without token.");
@@ -207,7 +139,7 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, mode, variant, logger
const codeqlPath = await toolcacheDownloadTool(codeqlURL, headers, tempDir, logger);
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLSemVer);
codeqlFolder = await toolcache.cacheDir(codeqlExtracted, "CodeQL", codeqlURLVersion);
}
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
if (process.platform === "win32") {
@@ -217,7 +149,7 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, mode, variant, logger
throw new Error(`Unsupported platform: ${process.platform}`);
}
cachedCodeQL = getCodeQLForCmd(codeqlCmd);
return { codeql: cachedCodeQL, toolsVersion: codeqlURLVersion };
return cachedCodeQL;
}
catch (e) {
logger.error(e);
@@ -225,26 +157,23 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, mode, variant, logger
}
}
exports.setupCodeQL = setupCodeQL;
function getCodeQLURLVersion(url) {
function getCodeQLURLVersion(url, logger) {
const match = url.match(/\/codeql-bundle-(.*)\//);
if (match === null || match.length < 2) {
throw new Error(`Malformed tools url: ${url}. Version could not be inferred`);
}
return match[1];
}
exports.getCodeQLURLVersion = getCodeQLURLVersion;
function convertToSemVer(version, logger) {
let version = match[1];
if (!semver.valid(version)) {
logger.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
version = `0.0.0-${version}`;
}
const s = semver.clean(version);
if (!s) {
throw new Error(`Bundle version ${version} is not in SemVer format.`);
throw new Error(`Malformed tools url ${url}. Version should be in SemVer format but have ${version} instead`);
}
return s;
}
exports.convertToSemVer = convertToSemVer;
exports.getCodeQLURLVersion = getCodeQLURLVersion;
/**
* Use the CodeQL executable located at the given path.
*/
@@ -308,7 +237,10 @@ function getCodeQLForCmd(cmd) {
return cmd;
},
async printVersion() {
await new toolrunner.ToolRunner(cmd, ["version", "--format=json"]).exec();
await new toolrunnner.ToolRunner(cmd, [
"version",
"--format=json",
]).exec();
},
async getTracerEnv(databasePath) {
// Write tracer-env.js to a temp location.
@@ -327,7 +259,7 @@ function getCodeQLForCmd(cmd) {
process.stdout.write(process.argv[2]);
fs.writeFileSync(process.argv[2], JSON.stringify(env), 'utf-8');`);
const envFile = path.resolve(databasePath, "working", "env.tmp");
await new toolrunner.ToolRunner(cmd, [
await new toolrunnner.ToolRunner(cmd, [
"database",
"trace-command",
databasePath,
@@ -339,7 +271,7 @@ function getCodeQLForCmd(cmd) {
return JSON.parse(fs.readFileSync(envFile, "utf-8"));
},
async databaseInit(databasePath, language, sourceRoot) {
await new toolrunner.ToolRunner(cmd, [
await new toolrunnner.ToolRunner(cmd, [
"database",
"init",
databasePath,
@@ -362,12 +294,12 @@ function getCodeQLForCmd(cmd) {
"-Dhttp.keepAlive=false",
"-Dmaven.wagon.http.pool=false",
].join(" ");
await new toolrunner.ToolRunner(autobuildCmd).exec();
await new toolrunnner.ToolRunner(autobuildCmd).exec();
},
async extractScannedLanguage(databasePath, language) {
// Get extractor location
let extractorPath = "";
await new toolrunner.ToolRunner(cmd, [
await new toolrunnner.ToolRunner(cmd, [
"resolve",
"extractor",
"--format=json",
@@ -397,11 +329,10 @@ function getCodeQLForCmd(cmd) {
traceCommand,
], error_matcher_1.errorMatchers);
},
async finalizeDatabase(databasePath, threadsFlag) {
async finalizeDatabase(databasePath) {
await toolrunner_error_catcher_1.toolrunnerErrorCatcher(cmd, [
"database",
"finalize",
threadsFlag,
...getExtraOptionsFromEnv(["database", "finalize"]),
databasePath,
], error_matcher_1.errorMatchers);
@@ -418,7 +349,7 @@ function getCodeQLForCmd(cmd) {
codeqlArgs.push("--search-path", extraSearchPath);
}
let output = "";
await new toolrunner.ToolRunner(cmd, codeqlArgs, {
await new toolrunnner.ToolRunner(cmd, codeqlArgs, {
listeners: {
stdout: (data) => {
output += data.toString();
@@ -428,15 +359,13 @@ function getCodeQLForCmd(cmd) {
return JSON.parse(output);
},
async databaseAnalyze(databasePath, sarifFile, querySuite, memoryFlag, addSnippetsFlag, threadsFlag) {
await new toolrunner.ToolRunner(cmd, [
await new toolrunnner.ToolRunner(cmd, [
"database",
"analyze",
memoryFlag,
threadsFlag,
databasePath,
"--min-disk-free=1024",
"--format=sarif-latest",
"--sarif-multicause-markdown",
`--output=${sarifFile}`,
addSnippetsFlag,
...getExtraOptionsFromEnv(["database", "analyze"]),
@@ -448,46 +377,44 @@ function getCodeQLForCmd(cmd) {
/**
* Gets the options for `path` of `options` as an array of extra option strings.
*/
function getExtraOptionsFromEnv(paths) {
function getExtraOptionsFromEnv(path) {
const options = util.getExtraOptionsEnvParam();
return getExtraOptions(options, paths, []);
}
/**
* Gets `options` as an array of extra option strings.
*
* - throws an exception mentioning `pathInfo` if this conversion is impossible.
*/
function asExtraOptions(options, pathInfo) {
if (options === undefined) {
return [];
}
if (!Array.isArray(options)) {
const msg = `The extra options for '${pathInfo.join(".")}' ('${JSON.stringify(options)}') are not in an array.`;
throw new Error(msg);
}
return options.map((o) => {
const t = typeof o;
if (t !== "string" && t !== "number" && t !== "boolean") {
const msg = `The extra option for '${pathInfo.join(".")}' ('${JSON.stringify(o)}') is not a primitive value.`;
throw new Error(msg);
}
return `${o}`;
});
return getExtraOptions(options, path, []);
}
/**
* Gets the options for `path` of `options` as an array of extra option strings.
*
* - the special terminal step name '*' in `options` matches all path steps
* - throws an exception if this conversion is impossible.
*
* Exported for testing.
*/
function getExtraOptions(options, paths, pathInfo) {
function getExtraOptions(options, path, pathInfo) {
var _a, _b, _c;
/**
* Gets `options` as an array of extra option strings.
*
* - throws an exception mentioning `pathInfo` if this conversion is impossible.
*/
function asExtraOptions(options, pathInfo) {
if (options === undefined) {
return [];
}
if (!Array.isArray(options)) {
const msg = `The extra options for '${pathInfo.join(".")}' ('${JSON.stringify(options)}') are not in an array.`;
throw new Error(msg);
}
return options.map((o) => {
const t = typeof o;
if (t !== "string" && t !== "number" && t !== "boolean") {
const msg = `The extra option for '${pathInfo.join(".")}' ('${JSON.stringify(o)}') is not a primitive value.`;
throw new Error(msg);
}
return `${o}`;
});
}
const all = asExtraOptions((_a = options) === null || _a === void 0 ? void 0 : _a["*"], pathInfo.concat("*"));
const specific = paths.length === 0
const specific = path.length === 0
? asExtraOptions(options, pathInfo)
: getExtraOptions((_b = options) === null || _b === void 0 ? void 0 : _b[paths[0]], (_c = paths) === null || _c === void 0 ? void 0 : _c.slice(1), pathInfo.concat(paths[0]));
: getExtraOptions((_b = options) === null || _b === void 0 ? void 0 : _b[path[0]], (_c = path) === null || _c === void 0 ? void 0 : _c.slice(1), pathInfo.concat(path[0]));
return all.concat(specific);
}
exports.getExtraOptions = getExtraOptions;
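The getExtraOptions helper above merges wildcard and path-specific options recursively: at each level, options under the terminal '*' key apply to every command, and the specific command path contributes its own entries. A compact sketch of that lookup, assuming an options object of the nested shape the helper expects; the concrete option strings are invented for illustration, and the real function additionally validates the values:

// Sketch of the recursive extra-options lookup shown above; option values are
// invented purely to illustrate how '*' and the specific command path merge.
type ExtraOptions = string[] | { [key: string]: ExtraOptions } | undefined;

function getExtraOptionsSketch(
  options: ExtraOptions,
  commandPath: string[],
  pathInfo: string[]
): string[] {
  const asArray = (o: ExtraOptions): string[] => (Array.isArray(o) ? o : []);
  // Options under the terminal '*' key apply to every command at this level.
  const all = asArray(
    options !== undefined && !Array.isArray(options) ? options["*"] : undefined
  );
  const specific =
    commandPath.length === 0
      ? asArray(options)
      : getExtraOptionsSketch(
          options !== undefined && !Array.isArray(options)
            ? options[commandPath[0]]
            : undefined,
          commandPath.slice(1),
          pathInfo.concat(commandPath[0])
        );
  return all.concat(specific);
}

// Example: global options plus options specific to `codeql database analyze`.
const extraOptions: ExtraOptions = {
  "*": ["--verbosity=errors"],
  database: {
    "*": ["--threads=2"],
    analyze: ["--ram=4096"],
  },
};
console.log(getExtraOptionsSketch(extraOptions, ["database", "analyze"], []));
// -> [ '--verbosity=errors', '--threads=2', '--ram=4096' ]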

File diff suppressed because one or more lines are too long

lib/codeql.test.js (generated, 118 changed lines)

@@ -10,142 +10,31 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const path = __importStar(require("path"));
const toolcache = __importStar(require("@actions/tool-cache"));
const ava_1 = __importDefault(require("ava"));
const nock_1 = __importDefault(require("nock"));
const path = __importStar(require("path"));
const codeql = __importStar(require("./codeql"));
const defaults = __importStar(require("./defaults.json"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
const sampleApiDetails = {
auth: "token",
url: "https://github.com",
};
const sampleGHAEApiDetails = {
auth: "token",
url: "https://example.githubenterprise.com",
};
ava_1.default("download codeql bundle cache", async (t) => {
await util.withTmpDir(async (tmpDir) => {
util.setupActionsVars(tmpDir, tmpDir);
const versions = ["20200601", "20200610"];
for (let i = 0; i < versions.length; i++) {
const version = versions[i];
nock_1.default("https://example.com")
.get(`/download/codeql-bundle-${version}/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
await codeql.setupCodeQL(`https://example.com/download/codeql-bundle-${version}/codeql-bundle.tar.gz`, "token", "https://github.example.com", tmpDir, tmpDir, "runner", logging_1.getRunnerLogger(true));
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
}
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
});
ava_1.default("download codeql bundle cache explicitly requested with pinned different version cached", async (t) => {
await util.withTmpDir(async (tmpDir) => {
util.setupActionsVars(tmpDir, tmpDir);
nock_1.default("https://example.com")
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
nock_1.default("https://example.com")
.get(`/download/codeql-bundle-20200610/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200610/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
});
});
ava_1.default("don't download codeql bundle cache with pinned different version cached", async (t) => {
await util.withTmpDir(async (tmpDir) => {
util.setupActionsVars(tmpDir, tmpDir);
nock_1.default("https://example.com")
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1);
});
});
ava_1.default("download codeql bundle cache with different version cached (not pinned)", async (t) => {
await util.withTmpDir(async (tmpDir) => {
util.setupActionsVars(tmpDir, tmpDir);
nock_1.default("https://example.com")
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
const platform = process.platform === "win32"
? "win64"
: process.platform === "linux"
? "linux64"
: "osx64";
nock_1.default("https://github.com")
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
});
ava_1.default('download codeql bundle cache with pinned different version cached if "latest" tools specified', async (t) => {
await util.withTmpDir(async (tmpDir) => {
util.setupActionsVars(tmpDir, tmpDir);
nock_1.default("https://example.com")
.get(`/download/codeql-bundle-20200601/codeql-bundle.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
await codeql.setupCodeQL("https://example.com/download/codeql-bundle-20200601/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
const platform = process.platform === "win32"
? "win64"
: process.platform === "linux"
? "linux64"
: "osx64";
nock_1.default("https://github.com")
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/codeql-bundle-${platform}.tar.gz`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle.tar.gz`));
await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, "runner", util.GitHubVariant.DOTCOM, logging_1.getRunnerLogger(true));
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
});
ava_1.default("download codeql bundle from github ae endpoint", async (t) => {
await util.withTmpDir(async (tmpDir) => {
util.setupActionsVars(tmpDir, tmpDir);
const bundleAssetID = 10;
const platform = process.platform === "win32"
? "win64"
: process.platform === "linux"
? "linux64"
: "osx64";
const codeQLBundleName = `codeql-bundle-${platform}.tar.gz`;
nock_1.default("https://example.githubenterprise.com")
.get(`/api/v3/enterprise/code-scanning/codeql-bundle/find/${defaults.bundleVersion}`)
.reply(200, {
assets: { [codeQLBundleName]: bundleAssetID },
});
nock_1.default("https://example.githubenterprise.com")
.get(`/api/v3/enterprise/code-scanning/codeql-bundle/download/${bundleAssetID}`)
.reply(200, {
url: `https://example.githubenterprise.com/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`,
});
nock_1.default("https://example.githubenterprise.com")
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`)
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, "runner", util.GitHubVariant.GHAE, logging_1.getRunnerLogger(true));
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1);
});
});
ava_1.default("parse codeql bundle url version", (t) => {
t.deepEqual(codeql.getCodeQLURLVersion("https://github.com/.../codeql-bundle-20200601/..."), "20200601");
});
ava_1.default("convert to semver", (t) => {
const tests = {
"20200601": "0.0.0-20200601",
"20200601.0": "0.0.0-20200601.0",
@@ -155,8 +44,9 @@ ava_1.default("convert to semver", (t) => {
"1.2.3-beta.1": "1.2.3-beta.1",
};
for (const [version, expectedVersion] of Object.entries(tests)) {
const url = `https://github.com/.../codeql-bundle-${version}/...`;
try {
const parsedVersion = codeql.convertToSemVer(version, logging_1.getRunnerLogger(true));
const parsedVersion = codeql.getCodeQLURLVersion(url, logging_1.getRunnerLogger(true));
t.deepEqual(parsedVersion, expectedVersion);
}
catch (e) {

File diff suppressed because one or more lines are too long

lib/config-utils.js (generated, 136 changed lines)

@@ -8,8 +8,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const yaml = __importStar(require("js-yaml"));
const path = __importStar(require("path"));
const api = __importStar(require("./api-client"));
const externalQueries = __importStar(require("./external-queries"));
const languages_1 = require("./languages");
@@ -58,31 +58,17 @@ function validateQueries(resolvedQueries) {
}
/**
* Run 'codeql resolve queries' and add the results to resultMap
*
* If a checkout path is given then the queries are assumed to be custom queries
* and an error will be thrown if there is anything invalid about the queries.
* If a checkout path is not given then the queries are assumed to be builtin
* queries, and error checking will be suppressed.
*/
async function runResolveQueries(codeQL, resultMap, toResolve, extraSearchPath) {
async function runResolveQueries(codeQL, resultMap, toResolve, extraSearchPath, errorOnInvalidQueries) {
const resolvedQueries = await codeQL.resolveQueries(toResolve, extraSearchPath);
if (extraSearchPath !== undefined) {
validateQueries(resolvedQueries);
}
for (const [language, queryPaths] of Object.entries(resolvedQueries.byLanguage)) {
for (const [language, queries] of Object.entries(resolvedQueries.byLanguage)) {
if (resultMap[language] === undefined) {
resultMap[language] = {
builtin: [],
custom: [],
};
}
const queries = Object.keys(queryPaths).filter((q) => !queryIsDisabled(language, q));
if (extraSearchPath !== undefined) {
resultMap[language].custom.push(...queries);
}
else {
resultMap[language].builtin.push(...queries);
resultMap[language] = [];
}
resultMap[language].push(...Object.keys(queries).filter((q) => !queryIsDisabled(language, q)));
}
if (errorOnInvalidQueries) {
validateQueries(resolvedQueries);
}
}
/**
@@ -90,7 +76,7 @@ async function runResolveQueries(codeQL, resultMap, toResolve, extraSearchPath)
*/
async function addDefaultQueries(codeQL, languages, resultMap) {
const suites = languages.map((l) => `${l}-code-scanning.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined);
await runResolveQueries(codeQL, resultMap, suites, undefined, false);
}
// The set of acceptable values for built-in suites from the codeql bundle
const builtinSuites = ["security-extended", "security-and-quality"];
@@ -99,12 +85,12 @@ const builtinSuites = ["security-extended", "security-and-quality"];
* Throws an error if suiteName is not a valid builtin suite.
*/
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, suiteName, configFile) {
const found = builtinSuites.find((suite) => suite === suiteName);
if (!found) {
const suite = builtinSuites.find((suite) => suite === suiteName);
if (!suite) {
throw new Error(getQueryUsesInvalid(configFile, suiteName));
}
const suites = languages.map((l) => `${l}-${suiteName}.qls`);
await runResolveQueries(codeQL, resultMap, suites, undefined);
await runResolveQueries(codeQL, resultMap, suites, undefined, false);
}
/**
* Retrieve the set of queries at localQueryPath and add them to resultMap.
@@ -123,12 +109,12 @@ async function addLocalQueries(codeQL, resultMap, localQueryPath, checkoutPath,
if (!(absoluteQueryPath + path.sep).startsWith(fs.realpathSync(checkoutPath) + path.sep)) {
throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
}
await runResolveQueries(codeQL, resultMap, [absoluteQueryPath], checkoutPath);
await runResolveQueries(codeQL, resultMap, [absoluteQueryPath], checkoutPath, true);
}
/**
* Retrieve the set of queries at the referenced remote repo and add them to resultMap.
*/
async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile) {
async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, logger, configFile) {
let tok = queryUses.split("@");
if (tok.length !== 2) {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
@@ -147,11 +133,11 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
}
const nwo = `${tok[0]}/${tok[1]}`;
// Checkout the external repository
const checkoutPath = await externalQueries.checkoutExternalRepository(nwo, ref, apiDetails, tempDir, logger);
const checkoutPath = await externalQueries.checkoutExternalRepository(nwo, ref, githubUrl, tempDir, logger);
const queryPath = tok.length > 2
? path.join(checkoutPath, tok.slice(2).join("/"))
: checkoutPath;
await runResolveQueries(codeQL, resultMap, [queryPath], checkoutPath);
await runResolveQueries(codeQL, resultMap, [queryPath], checkoutPath, true);
}
/**
* Parse a query 'uses' field to a discrete set of query files and update resultMap.
@@ -161,7 +147,7 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
* local paths starting with './', or references to remote repos, or
* a finite set of hardcoded terms for builtin suites.
*/
async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, checkoutPath, apiDetails, logger, configFile) {
async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir, checkoutPath, githubUrl, logger, configFile) {
queryUses = queryUses.trim();
if (queryUses === "") {
throw new Error(getQueryUsesInvalid(configFile));
@@ -177,7 +163,7 @@ async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir,
return;
}
// Otherwise, must be a reference to another repo
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, githubUrl, logger, configFile);
}
// Regex validating stars in paths or paths-ignore entries.
// The intention is to only allow ** to appear when immediately
@@ -185,44 +171,44 @@ async function parseQueryUses(languages, codeQL, resultMap, queryUses, tempDir,
const pathStarsRegex = /.*(?:\*\*[^/].*|\*\*$|[^/]\*\*.*)/;
// Characters that are supported by filters in workflows, but not by us.
// See https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
const filterPatternCharactersRegex = /.*[?+[\]!].*/;
const filterPatternCharactersRegex = /.*[\?\+\[\]!].*/;
// Checks that a paths of paths-ignore entry is valid, possibly modifying it
// to make it valid, or if not possible then throws an error.
function validateAndSanitisePath(originalPath, propertyName, configFile, logger) {
// Take a copy so we don't modify the original path, so we can still construct error messages
let newPath = originalPath;
let path = originalPath;
// All paths are relative to the src root, so strip off leading slashes.
while (newPath.charAt(0) === "/") {
newPath = newPath.substring(1);
while (path.charAt(0) === "/") {
path = path.substring(1);
}
// Trailing ** are redundant, so strip them off
if (newPath.endsWith("/**")) {
newPath = newPath.substring(0, newPath.length - 2);
if (path.endsWith("/**")) {
path = path.substring(0, path.length - 2);
}
// An empty path is not allowed as it's meaningless
if (newPath === "") {
if (path === "") {
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" is not an invalid path. ` +
`It is not necessary to include it, and it is not allowed to exclude it.`));
}
// Check for illegal uses of **
if (newPath.match(pathStarsRegex)) {
if (path.match(pathStarsRegex)) {
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an invalid "**" wildcard. ` +
`They must be immediately preceded and followed by a slash as in "/**/", or come at the start or end.`));
`They must be immediately preceeded and followed by a slash as in "/**/", or come at the start or end.`));
}
// Check for other regex characters that we don't support.
// Output a warning so the user knows, but otherwise continue normally.
if (newPath.match(filterPatternCharactersRegex)) {
if (path.match(filterPatternCharactersRegex)) {
logger.warning(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an unsupported character. ` +
`The filter pattern characters ?, +, [, ], ! are not supported and will be matched literally.`));
}
// Ban any uses of backslash for now.
// This may not play nicely with project layouts.
// This restriction can be lifted later if we determine they are ok.
if (newPath.indexOf("\\") !== -1) {
if (path.indexOf("\\") !== -1) {
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an "\\" character. These are not allowed in filters. ` +
`If running on windows we recommend using "/" instead for path filters.`));
}
return newPath;
return path;
}
exports.validateAndSanitisePath = validateAndSanitisePath;
// An undefined configFile in some of these functions indicates that
@@ -301,10 +287,10 @@ exports.getUnknownLanguagesError = getUnknownLanguagesError;
/**
* Gets the set of languages in the current repository
*/
async function getLanguagesInRepo(repository, apiDetails, logger) {
async function getLanguagesInRepo(repository, githubAuth, githubUrl, logger) {
logger.debug(`GitHub repo ${repository.owner} ${repository.repo}`);
const response = await api
.getApiClient(apiDetails, true)
.getApiClient(githubAuth, githubUrl, true)
.repos.listLanguages({
owner: repository.owner,
repo: repository.repo,
@@ -333,7 +319,7 @@ async function getLanguagesInRepo(repository, apiDetails, logger) {
* If no languages could be detected from either the workflow or the repository
* then throw an error.
*/
async function getLanguages(languagesInput, repository, apiDetails, logger) {
async function getLanguages(languagesInput, repository, githubAuth, githubUrl, logger) {
// Obtain from action input 'languages' if set
let languages = (languagesInput || "")
.split(",")
@@ -342,7 +328,7 @@ async function getLanguages(languagesInput, repository, apiDetails, logger) {
logger.info(`Languages from configuration: ${JSON.stringify(languages)}`);
if (languages.length === 0) {
// Obtain languages as all languages in the repo that can be analysed
languages = await getLanguagesInRepo(repository, apiDetails, logger);
languages = await getLanguagesInRepo(repository, githubAuth, githubUrl, logger);
logger.info(`Automatically detected languages: ${JSON.stringify(languages)}`);
}
// If the languages parameter was not given and no languages were
@@ -367,12 +353,12 @@ async function getLanguages(languagesInput, repository, apiDetails, logger) {
}
return parsedLanguages;
}
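The two hunks above implement a simple fallback: languages come from the workflow's "languages" input when it is non-empty, and otherwise from the repository-languages API call in getLanguagesInRepo. Below is a hedged sketch of that control flow, with the API call abstracted behind a callback so the example stays self-contained; the callback name and the error message are assumptions.

async function resolveLanguagesSketch(
  languagesInput: string | undefined,
  listRepoLanguages: () => Promise<string[]> // stands in for the repos.listLanguages call above
): Promise<string[]> {
  // Obtain from the action input "languages" if set
  let languages = (languagesInput || "")
    .split(",")
    .map((x) => x.trim())
    .filter((x) => x.length > 0);
  if (languages.length === 0) {
    // Fall back to whatever languages the repository reports
    languages = await listRepoLanguages();
  }
  if (languages.length === 0) {
    throw new Error("Could not detect any languages to analyse");
  }
  return languages;
}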
async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, checkoutPath, apiDetails, logger) {
async function addQueriesFromWorkflow(codeQL, queriesInput, languages, resultMap, tempDir, checkoutPath, githubUrl, logger) {
queriesInput = queriesInput.trim();
// "+" means "don't override config file" - see shouldAddConfigFileQueries
queriesInput = queriesInput.replace(/^\+/, "");
for (const query of queriesInput.split(",")) {
await parseQueryUses(languages, codeQL, resultMap, query, tempDir, checkoutPath, apiDetails, logger);
await parseQueryUses(languages, codeQL, resultMap, query, tempDir, checkoutPath, githubUrl, logger);
}
}
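A note on the "+" convention used here and in shouldAddConfigFileQueries below: a leading "+" on the workflow's "queries" input means the listed queries supplement the config file's queries rather than replace them. A small sketch of that decision follows; the helper name is illustrative.

function shouldAddConfigFileQueriesSketch(queriesInput: string | undefined): boolean {
  if (queriesInput === undefined) {
    return true; // nothing in the workflow, so keep whatever the config file specifies
  }
  return queriesInput.trim().startsWith("+"); // "+" means "add to", not "replace"
}

// shouldAddConfigFileQueriesSketch(undefined)   === true
// shouldAddConfigFileQueriesSketch("./a,./b")   === false  (workflow queries replace config queries)
// shouldAddConfigFileQueriesSketch("+./a,./b")  === true   (added on top of config queries)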
// Returns true if either no queries were provided in the workflow.
@@ -388,12 +374,12 @@ function shouldAddConfigFileQueries(queriesInput) {
/**
* Get the default config for when the user has not supplied one.
*/
async function getDefaultConfig(languagesInput, queriesInput, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger) {
const languages = await getLanguages(languagesInput, repository, apiDetails, logger);
async function getDefaultConfig(languagesInput, queriesInput, repository, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger) {
const languages = await getLanguages(languagesInput, repository, githubAuth, githubUrl, logger);
const queries = {};
await addDefaultQueries(codeQL, languages, queries);
if (queriesInput) {
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails, logger);
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, githubUrl, logger);
}
return {
languages,
@@ -404,14 +390,13 @@ async function getDefaultConfig(languagesInput, queriesInput, repository, tempDi
tempDir,
toolCacheDir,
codeQLCmd: codeQL.getPath(),
gitHubVersion,
};
}
exports.getDefaultConfig = getDefaultConfig;
/**
* Load the config from the given file.
*/
async function loadConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger) {
async function loadConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger) {
let parsedYAML;
if (isLocal(configFile)) {
// Treat the config file as relative to the workspace
@@ -419,7 +404,7 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
parsedYAML = getLocalConfig(configFile, checkoutPath);
}
else {
parsedYAML = await getRemoteConfig(configFile, apiDetails);
parsedYAML = await getRemoteConfig(configFile, githubAuth, githubUrl);
}
// Validate that the 'name' property is syntactically correct,
// even though we don't use the value yet.
@@ -431,7 +416,7 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
throw new Error(getNameInvalid(configFile));
}
}
const languages = await getLanguages(languagesInput, repository, apiDetails, logger);
const languages = await getLanguages(languagesInput, repository, githubAuth, githubUrl, logger);
const queries = {};
const pathsIgnore = [];
const paths = [];
@@ -450,7 +435,7 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
// unless they're prefixed with "+", in which case they supplement those
// in the config file.
if (queriesInput) {
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, apiDetails, logger);
await addQueriesFromWorkflow(codeQL, queriesInput, languages, queries, tempDir, checkoutPath, githubUrl, logger);
}
if (shouldAddConfigFileQueries(queriesInput) &&
QUERIES_PROPERTY in parsedYAML) {
@@ -462,37 +447,35 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
typeof query[QUERIES_USES_PROPERTY] !== "string") {
throw new Error(getQueryUsesInvalid(configFile));
}
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, checkoutPath, apiDetails, logger, configFile);
await parseQueryUses(languages, codeQL, queries, query[QUERIES_USES_PROPERTY], tempDir, checkoutPath, githubUrl, logger, configFile);
}
}
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
throw new Error(getPathsIgnoreInvalid(configFile));
}
for (const ignorePath of parsedYAML[PATHS_IGNORE_PROPERTY]) {
if (typeof ignorePath !== "string" || ignorePath === "") {
parsedYAML[PATHS_IGNORE_PROPERTY].forEach((path) => {
if (typeof path !== "string" || path === "") {
throw new Error(getPathsIgnoreInvalid(configFile));
}
pathsIgnore.push(validateAndSanitisePath(ignorePath, PATHS_IGNORE_PROPERTY, configFile, logger));
}
pathsIgnore.push(validateAndSanitisePath(path, PATHS_IGNORE_PROPERTY, configFile, logger));
});
}
if (PATHS_PROPERTY in parsedYAML) {
if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
throw new Error(getPathsInvalid(configFile));
}
for (const includePath of parsedYAML[PATHS_PROPERTY]) {
if (typeof includePath !== "string" || includePath === "") {
parsedYAML[PATHS_PROPERTY].forEach((path) => {
if (typeof path !== "string" || path === "") {
throw new Error(getPathsInvalid(configFile));
}
paths.push(validateAndSanitisePath(includePath, PATHS_PROPERTY, configFile, logger));
}
paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile, logger));
});
}
// The list of queries should not be empty for any language. If it is then
// it is a user configuration error.
for (const language of languages) {
if (queries[language] === undefined ||
(queries[language].builtin.length === 0 &&
queries[language].custom.length === 0)) {
if (queries[language] === undefined || queries[language].length === 0) {
throw new Error(`Did not detect any queries to run for ${language}. ` +
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
}
@@ -506,7 +489,6 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
tempDir,
toolCacheDir,
codeQLCmd: codeQL.getPath(),
gitHubVersion,
};
}
/**
@@ -515,15 +497,15 @@ async function loadConfig(languagesInput, queriesInput, configFile, repository,
* This will parse the config from the user input if present, or generate
* a default config. The parsed config is then stored to a known location.
*/
async function initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger) {
async function initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger) {
let config;
// If no config file was provided create an empty one
if (!configFile) {
logger.debug("No configuration file was provided");
config = await getDefaultConfig(languagesInput, queriesInput, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger);
config = await getDefaultConfig(languagesInput, queriesInput, repository, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger);
}
else {
config = await loadConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger);
config = await loadConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger);
}
// Save the config so we can easily access it again in the future
await saveConfig(config, logger);
@@ -548,7 +530,7 @@ function getLocalConfig(configFile, checkoutPath) {
}
return yaml.safeLoad(fs.readFileSync(configFile, "utf8"));
}
async function getRemoteConfig(configFile, apiDetails) {
async function getRemoteConfig(configFile, githubAuth, githubUrl) {
// retrieve the various parts of the config location, and ensure they're present
const format = new RegExp("(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)");
const pieces = format.exec(configFile);
@@ -556,7 +538,9 @@ async function getRemoteConfig(configFile, apiDetails) {
if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
}
const response = await api.getApiClient(apiDetails, true).repos.getContent({
const response = await api
.getApiClient(githubAuth, githubUrl, true)
.repos.getContents({
owner: pieces.groups.owner,
repo: pieces.groups.repo,
path: pieces.groups.path,
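The remote config reference parsed here has the shape owner/repo/path@ref, for example octo-org/codeql-config/config.yaml@main, the same format exercised by the tests further down. A small sketch of just the parsing step, using the regex shown above; the function name and the thrown message are illustrative.

const remoteConfigFormat = new RegExp(
  "(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)"
);

function parseRemoteConfigRef(configFile: string) {
  const pieces = remoteConfigFormat.exec(configFile);
  if (pieces === null || pieces.groups === undefined) {
    throw new Error(`"${configFile}" is not of the form owner/repo/path@ref`);
  }
  const { owner, repo, path, ref } = pieces.groups;
  return { owner, repo, path, ref };
}

// parseRemoteConfigRef("octo-org/codeql-config/config.yaml@main")
//   -> { owner: "octo-org", repo: "codeql-config", path: "config.yaml", ref: "main" }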

File diff suppressed because one or more lines are too long

lib/config-utils.test.js (generated, 127 changed lines)

@@ -10,10 +10,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const github = __importStar(require("@actions/github"));
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const sinon_1 = __importDefault(require("sinon"));
const api = __importStar(require("./api-client"));
const codeql_1 = require("./codeql");
@@ -23,12 +23,6 @@ const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
testing_utils_1.setupTests(ava_1.default);
const sampleApiDetails = {
auth: "token",
externalRepoAuth: "token",
url: "https://github.example.com",
};
const gitHubVersion = { type: util.GitHubVariant.DOTCOM };
// Returns the filepath of the newly-created file
function createConfigFile(inputFileContents, tmpDir) {
const configFilePath = path.join(tmpDir, "input");
@@ -37,19 +31,19 @@ function createConfigFile(inputFileContents, tmpDir) {
}
function mockGetContents(content) {
// Passing an auth token is required, so we just use a dummy value
const client = github.getOctokit("123");
const client = new github.GitHub("123");
const response = {
data: content,
};
const spyGetContents = sinon_1.default
.stub(client.repos, "getContent")
.stub(client.repos, "getContents")
.resolves(response);
sinon_1.default.stub(api, "getApiClient").value(() => client);
return spyGetContents;
}
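Both mock helpers in this test file follow the same pattern: build a client with a dummy token, stub the one REST call the code under test will make so it resolves to canned data, and stub api.getApiClient so the production code receives the stubbed client. Restated as a compact, hedged sketch that mirrors the getOctokit/getContent variant shown above; types are loosened with "as any" purely for brevity.

import * as github from "@actions/github";
import * as sinon from "sinon";
import * as api from "./api-client";

function stubGetContentSketch(content: unknown): sinon.SinonStub {
  const client = github.getOctokit("123"); // the token value is irrelevant, the call never leaves the process
  const spy = sinon.stub(client.repos, "getContent").resolves({ data: content } as any);
  sinon.stub(api, "getApiClient").value(() => client);
  return spy;
}

// Later, a test can assert the remote config was fetched through the API:
//   const spy = stubGetContentSketch({ content: "...", type: "file" });
//   ... run initConfig with a remote config reference ...
//   t.assert(spy.called);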
function mockListLanguages(languages) {
// Passing an auth token is required, so we just use a dummy value
const client = github.getOctokit("123");
const client = new github.GitHub("123");
const response = {
data: {},
};
@@ -72,8 +66,8 @@ ava_1.default("load empty config", async (t) => {
};
},
});
const config = await configUtils.initConfig(languages, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger));
const config = await configUtils.initConfig(languages, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logger);
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logger));
});
});
ava_1.default("loading config saves config", async (t) => {
@@ -92,7 +86,7 @@ ava_1.default("loading config saves config", async (t) => {
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// Sanity check that getConfig returns undefined before we have called initConfig
t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined);
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logger);
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logger);
// The saved config file should now exist
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
// And that same newly-initialised config should now be returned by getConfig
@@ -103,7 +97,7 @@ ava_1.default("loading config saves config", async (t) => {
ava_1.default("load input outside of workspace", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
try {
await configUtils.initConfig(undefined, undefined, "../input", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
await configUtils.initConfig(undefined, undefined, "../input", { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -116,7 +110,7 @@ ava_1.default("load non-local input with invalid repo syntax", async (t) => {
// no filename given, just a repo
const configFile = "octo-org/codeql-config@main";
try {
await configUtils.initConfig(undefined, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
await configUtils.initConfig(undefined, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -130,7 +124,7 @@ ava_1.default("load non-existent input", async (t) => {
const configFile = "input";
t.false(fs.existsSync(path.join(tmpDir, configFile)));
try {
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -169,12 +163,7 @@ ava_1.default("load non-empty input", async (t) => {
// And the config we expect it to parse to
const expectedConfig = {
languages: [languages_1.Language.javascript],
queries: {
javascript: {
builtin: [],
custom: ["/foo/a.ql", "/bar/b.ql"],
},
},
queries: { javascript: ["/foo/a.ql", "/bar/b.ql"] },
pathsIgnore: ["a", "b"],
paths: ["c/d"],
originalUserInput: {
@@ -187,11 +176,10 @@ ava_1.default("load non-empty input", async (t) => {
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: codeQL.getPath(),
gitHubVersion,
};
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
const actualConfig = await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
const actualConfig = await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Should exactly equal the object we constructed earlier
t.deepEqual(actualConfig, expectedConfig);
});
@@ -220,14 +208,14 @@ ava_1.default("Default queries are used", async (t) => {
});
// The important point of this config is that it doesn't specify
// the disable-default-queries field.
// Any other details are hopefully irrelevant for this test.
// Any other details are hopefully irrelevant for this tetst.
const inputFileContents = `
paths:
- foo`;
fs.mkdirSync(path.join(tmpDir, "foo"));
const languages = "javascript";
const configFilePath = createConfigFile(inputFileContents, tmpDir);
await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolve queries was called correctly
t.deepEqual(resolveQueriesArgs.length, 1);
t.deepEqual(resolveQueriesArgs[0].queries, [
@@ -243,9 +231,9 @@ ava_1.default("Default queries are used", async (t) => {
*/
function queriesToResolvedQueryForm(queries) {
const dummyResolvedQueries = {};
for (const q of queries) {
queries.forEach((q) => {
dummyResolvedQueries[q] = {};
}
});
return {
byLanguage: {
javascript: dummyResolvedQueries,
@@ -270,7 +258,7 @@ ava_1.default("Queries can be specified in config file", async (t) => {
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
const config = await configUtils.initConfig(languages, undefined, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries
// and once for `./foo` from the config file.
@@ -278,10 +266,9 @@ ava_1.default("Queries can be specified in config file", async (t) => {
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.regex(resolveQueriesArgs[1].queries[0], /.*\/foo$/);
// Now check that the end result contains the default queries and the query from config
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 1);
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"].custom[0], /.*\/foo$/);
t.deepEqual(config.queries["javascript"].length, 2);
t.regex(config.queries["javascript"][0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"][1], /.*\/foo$/);
});
});
ava_1.default("Queries from config file can be overridden in workflow file", async (t) => {
@@ -292,7 +279,7 @@ ava_1.default("Queries from config file can be overridden in workflow file", asy
- uses: ./foo`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
// This config item should take precedence over the config file but shouldn't affect the default queries.
const testQueries = "./override";
const queries = "./override";
fs.mkdirSync(path.join(tmpDir, "foo"));
fs.mkdirSync(path.join(tmpDir, "override"));
const resolveQueriesArgs = [];
@@ -303,7 +290,7 @@ ava_1.default("Queries from config file can be overridden in workflow file", asy
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
const config = await configUtils.initConfig(languages, queries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries and once for `./override`,
// but won't be called for './foo' from the config file.
@@ -311,10 +298,9 @@ ava_1.default("Queries from config file can be overridden in workflow file", asy
t.deepEqual(resolveQueriesArgs[1].queries.length, 1);
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override$/);
// Now check that the end result contains only the default queries and the override query
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 1);
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"].custom[0], /.*\/override$/);
t.deepEqual(config.queries["javascript"].length, 2);
t.regex(config.queries["javascript"][0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"][1], /.*\/override$/);
});
});
ava_1.default("Queries in workflow file can be used in tandem with the 'disable default queries' option", async (t) => {
@@ -325,7 +311,7 @@ ava_1.default("Queries in workflow file can be used in tandem with the 'disable
name: my config
disable-default-queries: true`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
const testQueries = "./workflow-query";
const queries = "./workflow-query";
fs.mkdirSync(path.join(tmpDir, "workflow-query"));
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
@@ -335,7 +321,7 @@ ava_1.default("Queries in workflow file can be used in tandem with the 'disable
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
const config = await configUtils.initConfig(languages, queries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for `./workflow-query`,
// but won't be called for the default one since that was disabled
@@ -343,16 +329,15 @@ ava_1.default("Queries in workflow file can be used in tandem with the 'disable
t.deepEqual(resolveQueriesArgs[0].queries.length, 1);
t.regex(resolveQueriesArgs[0].queries[0], /.*\/workflow-query$/);
// Now check that the end result contains only the workflow query, and not the default one
t.deepEqual(config.queries["javascript"].builtin.length, 0);
t.deepEqual(config.queries["javascript"].custom.length, 1);
t.regex(config.queries["javascript"].custom[0], /.*\/workflow-query$/);
t.deepEqual(config.queries["javascript"].length, 1);
t.regex(config.queries["javascript"][0], /.*\/workflow-query$/);
});
});
ava_1.default("Multiple queries can be specified in workflow file, no config file required", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
fs.mkdirSync(path.join(tmpDir, "override1"));
fs.mkdirSync(path.join(tmpDir, "override2"));
const testQueries = "./override1,./override2";
const queries = "./override1,./override2";
const resolveQueriesArgs = [];
const codeQL = codeql_1.setCodeQL({
async resolveQueries(queries, extraSearchPath) {
@@ -361,7 +346,7 @@ ava_1.default("Multiple queries can be specified in workflow file, no config fil
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
const config = await configUtils.initConfig(languages, queries, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly:
// It'll be called once for the default queries,
// and then once for each of the two queries from the workflow
@@ -371,11 +356,10 @@ ava_1.default("Multiple queries can be specified in workflow file, no config fil
t.regex(resolveQueriesArgs[1].queries[0], /.*\/override1$/);
t.regex(resolveQueriesArgs[2].queries[0], /.*\/override2$/);
// Now check that the end result contains both the queries from the workflow, as well as the defaults
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 2);
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"].custom[0], /.*\/override1$/);
t.regex(config.queries["javascript"].custom[1], /.*\/override2$/);
t.deepEqual(config.queries["javascript"].length, 3);
t.regex(config.queries["javascript"][0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"][1], /.*\/override1$/);
t.regex(config.queries["javascript"][2], /.*\/override2$/);
});
});
ava_1.default("Queries in workflow file can be added to the set of queries without overriding config file", async (t) => {
@@ -388,7 +372,7 @@ ava_1.default("Queries in workflow file can be added to the set of queries witho
- uses: ./foo`;
const configFilePath = createConfigFile(inputFileContents, tmpDir);
// These queries shouldn't override anything, because the value is prefixed with "+"
const testQueries = "+./additional1,./additional2";
const queries = "+./additional1,./additional2";
fs.mkdirSync(path.join(tmpDir, "foo"));
fs.mkdirSync(path.join(tmpDir, "additional1"));
fs.mkdirSync(path.join(tmpDir, "additional2"));
@@ -400,7 +384,7 @@ ava_1.default("Queries in workflow file can be added to the set of queries witho
},
});
const languages = "javascript";
const config = await configUtils.initConfig(languages, testQueries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
const config = await configUtils.initConfig(languages, queries, configFilePath, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
// Check resolveQueries was called correctly
// It'll be called once for the default queries,
// once for each of additional1 and additional2,
@@ -413,12 +397,11 @@ ava_1.default("Queries in workflow file can be added to the set of queries witho
t.deepEqual(resolveQueriesArgs[3].queries.length, 1);
t.regex(resolveQueriesArgs[3].queries[0], /.*\/foo$/);
// Now check that the end result contains all the queries
t.deepEqual(config.queries["javascript"].builtin.length, 1);
t.deepEqual(config.queries["javascript"].custom.length, 3);
t.regex(config.queries["javascript"].builtin[0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"].custom[0], /.*\/additional1$/);
t.regex(config.queries["javascript"].custom[1], /.*\/additional2$/);
t.regex(config.queries["javascript"].custom[2], /.*\/foo$/);
t.deepEqual(config.queries["javascript"].length, 4);
t.regex(config.queries["javascript"][0], /javascript-code-scanning.qls$/);
t.regex(config.queries["javascript"][1], /.*\/additional1$/);
t.regex(config.queries["javascript"][2], /.*\/additional2$/);
t.regex(config.queries["javascript"][3], /.*\/foo$/);
});
});
ava_1.default("Invalid queries in workflow file handled correctly", async (t) => {
@@ -428,7 +411,7 @@ ava_1.default("Invalid queries in workflow file handled correctly", async (t) =>
// This function just needs to be type-correct; it doesn't need to do anything,
// since we're deliberately passing in invalid data
const codeQL = codeql_1.setCodeQL({
async resolveQueries() {
async resolveQueries(_queries, _extraSearchPath) {
return {
byLanguage: {
javascript: {},
@@ -439,7 +422,7 @@ ava_1.default("Invalid queries in workflow file handled correctly", async (t) =>
},
});
try {
await configUtils.initConfig(languages, queries, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
await configUtils.initConfig(languages, queries, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
t.fail("initConfig did not throw error");
}
catch (err) {
@@ -482,7 +465,7 @@ ava_1.default("API client used when reading remote config", async (t) => {
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
const configFile = "octo-org/codeql-config/config.yaml@main";
const languages = "javascript";
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
t.assert(spyGetContents.called);
});
});
@@ -492,7 +475,7 @@ ava_1.default("Remote config handles the case where a directory is provided", as
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(undefined, undefined, repoReference, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
await configUtils.initConfig(undefined, undefined, repoReference, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -508,7 +491,7 @@ ava_1.default("Invalid format of remote config handled correctly", async (t) =>
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(undefined, undefined, repoReference, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
await configUtils.initConfig(undefined, undefined, repoReference, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -520,7 +503,7 @@ ava_1.default("No detected languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
mockListLanguages([]);
try {
await configUtils.initConfig(undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
await configUtils.initConfig(undefined, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -532,7 +515,7 @@ ava_1.default("Unknown languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const languages = "ruby,english";
try {
await configUtils.initConfig(languages, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
await configUtils.initConfig(languages, undefined, undefined, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeql_1.getCachedCodeQL(), tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -557,7 +540,7 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
const inputFile = path.join(tmpDir, configFile);
fs.writeFileSync(inputFile, inputFileContents, "utf8");
try {
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, logging_1.getRunnerLogger(true));
await configUtils.initConfig(languages, undefined, configFile, { owner: "github", repo: "example " }, tmpDir, tmpDir, codeQL, tmpDir, "token", "https://github.example.com", logging_1.getRunnerLogger(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
@@ -611,11 +594,11 @@ ava_1.default("path validations", (t) => {
// Dummy values to pass to validateAndSanitisePath
const propertyName = "paths";
const configFile = "./.github/codeql/config.yml";
for (const validPath of validPaths) {
t.truthy(configUtils.validateAndSanitisePath(validPath, propertyName, configFile, logging_1.getRunnerLogger(true)));
for (const path of validPaths) {
t.truthy(configUtils.validateAndSanitisePath(path, propertyName, configFile, logging_1.getRunnerLogger(true)));
}
for (const invalidPath of invalidPaths) {
t.throws(() => configUtils.validateAndSanitisePath(invalidPath, propertyName, configFile, logging_1.getRunnerLogger(true)));
for (const path of invalidPaths) {
t.throws(() => configUtils.validateAndSanitisePath(path, propertyName, configFile, logging_1.getRunnerLogger(true)));
}
});
ava_1.default("path sanitisation", (t) => {

File diff suppressed because one or more lines are too long


@@ -1,3 +1,3 @@
{
"bundleVersion": "codeql-bundle-20210308"
"bundleVersion": "codeql-bundle-20200826"
}


@@ -7,14 +7,13 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
/**
* Check out repository at the given ref, and return the directory of the checkout.
*/
async function checkoutExternalRepository(repository, ref, apiDetails, tempDir, logger) {
async function checkoutExternalRepository(repository, ref, githubUrl, tempDir, logger) {
logger.info(`Checking out ${repository}`);
const checkoutLocation = path.join(tempDir, repository, ref);
if (!checkoutLocation.startsWith(tempDir)) {
@@ -22,13 +21,13 @@ async function checkoutExternalRepository(repository, ref, apiDetails, tempDir,
throw new Error(`'${repository}@${ref}' is not a valid repository and reference.`);
}
if (!fs.existsSync(checkoutLocation)) {
const repoCloneURL = buildCheckoutURL(repository, apiDetails);
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [
const repoURL = `${githubUrl}/${repository}`;
await new toolrunnner.ToolRunner("git", [
"clone",
repoCloneURL,
repoURL,
checkoutLocation,
]).exec();
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [
await new toolrunnner.ToolRunner("git", [
`--work-tree=${checkoutLocation}`,
`--git-dir=${checkoutLocation}/.git`,
"checkout",
@@ -38,17 +37,4 @@ async function checkoutExternalRepository(repository, ref, apiDetails, tempDir,
return checkoutLocation;
}
exports.checkoutExternalRepository = checkoutExternalRepository;
function buildCheckoutURL(repository, apiDetails) {
const repoCloneURL = new URL(apiDetails.url);
if (apiDetails.externalRepoAuth !== undefined) {
repoCloneURL.username = "x-access-token";
repoCloneURL.password = apiDetails.externalRepoAuth;
}
if (!repoCloneURL.pathname.endsWith("/")) {
repoCloneURL.pathname += "/";
}
repoCloneURL.pathname += `${repository}`;
return repoCloneURL.toString();
}
exports.buildCheckoutURL = buildCheckoutURL;
//# sourceMappingURL=external-queries.js.map
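The buildCheckoutURL helper shown above derives the clone URL for an external query repository from the API details: the instance URL becomes the base, and, when externalRepoAuth is set, it is injected as an x-access-token credential. Here is a usage sketch matching the expectations in the buildCheckoutURL test further down; the argument objects mirror what that test passes.

import * as externalQueries from "./external-queries";

const cloneURL = externalQueries.buildCheckoutURL("foo/bar", {
  url: "https://github.example.com/",
  externalRepoAuth: "abc",
});
// cloneURL === "https://x-access-token:abc@github.example.com/foo/bar"

const anonymousURL = externalQueries.buildCheckoutURL("foo/bar", {
  url: "https://github.com",
  externalRepoAuth: undefined,
});
// anonymousURL === "https://github.com/foo/bar"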


@@ -1 +1 @@
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAKpD;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAC9C,UAAkB,EAClB,GAAW,EACX,UAAwC,EACxC,OAAe,EACf,MAAc;IAEd,MAAM,CAAC,IAAI,CAAC,gBAAgB,UAAU,EAAE,CAAC,CAAC;IAE1C,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;IAE7D,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QACzC,wGAAwG;QACxG,MAAM,IAAI,KAAK,CACb,IAAI,UAAU,IAAI,GAAG,4CAA4C,CAClE,CAAC;KACH;IAED,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,YAAY,GAAG,gBAAgB,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAC9D,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE;YAChE,OAAO;YACP,YAAY;YACZ,gBAAgB;SACjB,CAAC,CAAC,IAAI,EAAE,CAAC;QACV,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE;YAChE,eAAe,gBAAgB,EAAE;YACjC,aAAa,gBAAgB,OAAO;YACpC,UAAU;YACV,GAAG;SACJ,CAAC,CAAC,IAAI,EAAE,CAAC;KACX;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAlCD,gEAkCC;AAED,SAAgB,gBAAgB,CAC9B,UAAkB,EAClB,UAAwC;IAExC,MAAM,YAAY,GAAG,IAAI,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC7C,IAAI,UAAU,CAAC,gBAAgB,KAAK,SAAS,EAAE;QAC7C,YAAY,CAAC,QAAQ,GAAG,gBAAgB,CAAC;QACzC,YAAY,CAAC,QAAQ,GAAG,UAAU,CAAC,gBAAgB,CAAC;KACrD;IACD,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;QACxC,YAAY,CAAC,QAAQ,IAAI,GAAG,CAAC;KAC9B;IACD,YAAY,CAAC,QAAQ,IAAI,GAAG,UAAU,EAAE,CAAC;IACzC,OAAO,YAAY,CAAC,QAAQ,EAAE,CAAC;AACjC,CAAC;AAdD,4CAcC"}
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,0EAA4D;AAC5D,uCAAyB;AACzB,2CAA6B;AAI7B;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAC9C,UAAkB,EAClB,GAAW,EACX,SAAiB,EACjB,OAAe,EACf,MAAc;IAEd,MAAM,CAAC,IAAI,CAAC,gBAAgB,UAAU,EAAE,CAAC,CAAC;IAE1C,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;IAE7D,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QACzC,wGAAwG;QACxG,MAAM,IAAI,KAAK,CACb,IAAI,UAAU,IAAI,GAAG,4CAA4C,CAClE,CAAC;KACH;IAED,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,OAAO,GAAG,GAAG,SAAS,IAAI,UAAU,EAAE,CAAC;QAC7C,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,KAAK,EAAE;YACtC,OAAO;YACP,OAAO;YACP,gBAAgB;SACjB,CAAC,CAAC,IAAI,EAAE,CAAC;QACV,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,KAAK,EAAE;YACtC,eAAe,gBAAgB,EAAE;YACjC,aAAa,gBAAgB,OAAO;YACpC,UAAU;YACV,GAAG;SACJ,CAAC,CAAC,IAAI,EAAE,CAAC;KACX;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAlCD,gEAkCC"}


@@ -10,11 +10,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner"));
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
const ava_1 = __importDefault(require("ava"));
const externalQueries = __importStar(require("./external-queries"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
@@ -44,7 +43,7 @@ ava_1.default("checkoutExternalQueries", async (t) => {
];
console.log(`Running: git ${command.join(" ")}`);
try {
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), command, {
await new toolrunnner.ToolRunner("git", command, {
silent: true,
listeners: {
stdout: (data) => {
@@ -67,7 +66,6 @@ ava_1.default("checkoutExternalQueries", async (t) => {
await runGit(["init", repoPath]);
await runGit(["config", "user.email", "test@github.com"]);
await runGit(["config", "user.name", "Test Test"]);
await runGit(["config", "commit.gpgsign", "false"]);
fs.writeFileSync(path.join(repoPath, "a"), "a content");
await runGit(["add", "a"]);
await runGit(["commit", "-m", "commit1"]);
@@ -81,35 +79,17 @@ ava_1.default("checkoutExternalQueries", async (t) => {
const commit2Sha = await runGit(["rev-parse", "HEAD"]);
// Checkout the first commit, which should contain 'a' and 'b'
t.false(fs.existsSync(path.join(tmpDir, repoName)));
await externalQueries.checkoutExternalRepository(repoName, commit1Sha, { url: `file://${testRepoBaseDir}`, externalRepoAuth: "" }, tmpDir, logging_1.getRunnerLogger(true));
await externalQueries.checkoutExternalRepository(repoName, commit1Sha, `file://${testRepoBaseDir}`, tmpDir, logging_1.getRunnerLogger(true));
t.true(fs.existsSync(path.join(tmpDir, repoName)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "a")));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit1Sha, "b")));
// Checkout the second commit as well, which should only contain 'a'
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
await externalQueries.checkoutExternalRepository(repoName, commit2Sha, { url: `file://${testRepoBaseDir}`, externalRepoAuth: "" }, tmpDir, logging_1.getRunnerLogger(true));
await externalQueries.checkoutExternalRepository(repoName, commit2Sha, `file://${testRepoBaseDir}`, tmpDir, logging_1.getRunnerLogger(true));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha)));
t.true(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "a")));
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "b")));
});
});
ava_1.default("buildCheckoutURL", (t) => {
t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
url: "https://github.com",
externalRepoAuth: undefined,
}), "https://github.com/foo/bar");
t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
url: "https://github.example.com/",
externalRepoAuth: undefined,
}), "https://github.example.com/foo/bar");
t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
url: "https://github.com",
externalRepoAuth: "abc",
}), "https://x-access-token:abc@github.com/foo/bar");
t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
url: "https://github.example.com/",
externalRepoAuth: "abc",
}), "https://x-access-token:abc@github.example.com/foo/bar");
});
//# sourceMappingURL=external-queries.test.js.map


@@ -1 +1 @@
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AACpD,8CAAuB;AAEvB,oEAAsD;AACtD,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,kDAAkD;QAClD,mFAAmF;QACnF,gDAAgD;QAChD,wCAAwC;QACxC,8EAA8E;QAC9E,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;QAC3D,MAAM,QAAQ,GAAG,WAAW,CAAC;QAC7B,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;QACtD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAE/C,oDAAoD;QACpD,oCAAoC;QACpC,2DAA2D;QAC3D,MAAM,MAAM,GAAG,KAAK,WAAW,OAAiB;YAC9C,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,OAAO,GAAG;gBACR,aAAa,UAAU,EAAE;gBACzB,eAAe,QAAQ,EAAE;gBACzB,GAAG,OAAO;aACX,CAAC;YACF,OAAO,CAAC,GAAG,CAAC,gBAAgB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;YACjD,IAAI;gBACF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAChC,OAAO,EACP;oBACE,MAAM,EAAE,IAAI;oBACZ,SAAS,EAAE;wBACT,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;wBACD,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;qBACF;iBACF,CACF,CAAC,IAAI,EAAE,CAAC;aACV;YAAC,OAAO,CAAC,EAAE;gBACV,OAAO,CAAC,GAAG,CAAC,uBAAuB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;gBACxD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBAC7B,MAAM,CAAC,CAAC;aACT;YACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;QACvB,CAAC,CAAC;QAEF,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC;QACjC,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,YAAY,EAAE,iBAAiB,CAAC,CAAC,CAAC;QAC1D,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC;QACnD,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,gBAAgB,EAAE,OAAO,CAAC,CAAC,CAAC;QAEpD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAE1C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;QACxC,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,8DAA8D;QAC9D,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACpD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QAEpE,oEAAoE;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAChE,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EA
CR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;IACvE,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,CAAC,CAAC,EAAE,EAAE;IAC7B,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,oBAAoB;QACzB,gBAAgB,EAAE,SAAS;KAC5B,CAAC,EACF,4BAA4B,CAC7B,CAAC;IACF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,6BAA6B;QAClC,gBAAgB,EAAE,SAAS;KAC5B,CAAC,EACF,oCAAoC,CACrC,CAAC;IAEF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,oBAAoB;QACzB,gBAAgB,EAAE,KAAK;KACxB,CAAC,EACF,+CAA+C,CAChD,CAAC;IACF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,6BAA6B;QAClC,gBAAgB,EAAE,KAAK;KACxB,CAAC,EACF,uDAAuD,CACxD,CAAC;AACJ,CAAC,CAAC,CAAC"}
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,0EAA4D;AAC5D,8CAAuB;AACvB,uCAAyB;AACzB,2CAA6B;AAE7B,oEAAsD;AACtD,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,kDAAkD;QAClD,mFAAmF;QACnF,gDAAgD;QAChD,wCAAwC;QACxC,8EAA8E;QAC9E,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;QAC3D,MAAM,QAAQ,GAAG,WAAW,CAAC;QAC7B,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;QACtD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAE/C,oDAAoD;QACpD,oCAAoC;QACpC,2DAA2D;QAC3D,MAAM,MAAM,GAAG,KAAK,WAAW,OAAiB;YAC9C,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,OAAO,GAAG;gBACR,aAAa,UAAU,EAAE;gBACzB,eAAe,QAAQ,EAAE;gBACzB,GAAG,OAAO;aACX,CAAC;YACF,OAAO,CAAC,GAAG,CAAC,gBAAgB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;YACjD,IAAI;gBACF,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,KAAK,EAAE,OAAO,EAAE;oBAC/C,MAAM,EAAE,IAAI;oBACZ,SAAS,EAAE;wBACT,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;wBACD,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;qBACF;iBACF,CAAC,CAAC,IAAI,EAAE,CAAC;aACX;YAAC,OAAO,CAAC,EAAE;gBACV,OAAO,CAAC,GAAG,CAAC,uBAAuB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;gBACxD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBAC7B,MAAM,CAAC,CAAC;aACT;YACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;QACvB,CAAC,CAAC;QAEF,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC;QACjC,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,YAAY,EAAE,iBAAiB,CAAC,CAAC,CAAC;QAC1D,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC;QAEnD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAE1C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;QACxC,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,8DAA8D;QAC9D,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACpD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,UAAU,eAAe,EAAE,EAC3B,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QAEpE,oEAAoE;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAChE,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,UAAU,eAAe,EAAE,EAC3B,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAA
Q,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;IACvE,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

lib/fingerprints.js (generated, 26 changed lines)

@@ -46,7 +46,7 @@ function hash(callback, input) {
// Indexes match up with those from the window variable.
const lineNumbers = Array(BLOCK_SIZE).fill(-1);
// The current hash value, updated as we read each character
let hashRaw = long_1.default.ZERO;
let hash = long_1.default.ZERO;
const firstMod = computeFirstMod();
// The current index in the window, will wrap around to zero when we reach BLOCK_SIZE
let index = 0;
@@ -61,7 +61,7 @@ function hash(callback, input) {
const hashCounts = {};
// Output the current hash and line number to the callback function
const outputHash = function () {
const hashValue = hashRaw.toUnsigned().toString(16);
const hashValue = hash.toUnsigned().toString(16);
if (!hashCounts[hashValue]) {
hashCounts[hashValue] = 0;
}
@@ -73,7 +73,7 @@ function hash(callback, input) {
const updateHash = function (current) {
const begin = window[index];
window[index] = current;
hashRaw = MOD.multiply(hashRaw)
hash = MOD.multiply(hash)
.add(long_1.default.fromInt(current))
.subtract(firstMod.multiply(long_1.default.fromInt(begin)));
index = (index + 1) % BLOCK_SIZE;
@@ -120,7 +120,7 @@ function hash(callback, input) {
}
exports.hash = hash;
// Generate a hash callback function that updates the given result in-place
// when it receives a hash for the correct line number. Ignores hashes for other lines.
// when it recieves a hash for the correct line number. Ignores hashes for other lines.
function locationUpdateCallback(result, location, logger) {
var _a, _b;
let locationStartLine = (_b = (_a = location.physicalLocation) === null || _a === void 0 ? void 0 : _a.region) === null || _b === void 0 ? void 0 : _b.startLine;
@@ -130,7 +130,7 @@ function locationUpdateCallback(result, location, logger) {
// using the hash of the first line of the file.
locationStartLine = 1;
}
return function (lineNumber, hashValue) {
return function (lineNumber, hash) {
// Ignore hashes for lines that don't concern us
if (locationStartLine !== lineNumber) {
return;
@@ -142,10 +142,10 @@ function locationUpdateCallback(result, location, logger) {
// If the hash doesn't match the existing fingerprint then
// output a warning and don't overwrite it.
if (!existingFingerprint) {
result.partialFingerprints.primaryLocationLineHash = hashValue;
result.partialFingerprints.primaryLocationLineHash = hash;
}
else if (existingFingerprint !== hashValue) {
logger.warning(`Calculated fingerprint of ${hashValue} for file ${location.physicalLocation.artifactLocation.uri} line ${lineNumber}, but found existing inconsistent fingerprint value ${existingFingerprint}`);
else if (existingFingerprint !== hash) {
logger.warning(`Calculated fingerprint of ${hash} for file ${location.physicalLocation.artifactLocation.uri} line ${lineNumber}, but found existing inconsistent fingerprint value ${existingFingerprint}`);
}
};
}
@@ -229,16 +229,14 @@ function addFingerprints(sarifContents, checkoutPath, logger) {
}
}
// Now hash each file that was found
for (const [filepath, callbacks] of Object.entries(callbacksByFile)) {
Object.entries(callbacksByFile).forEach(([filepath, callbacks]) => {
// A callback that forwards the hash to all other callbacks for that file
const teeCallback = function (lineNumber, hashValue) {
for (const c of Object.values(callbacks)) {
c(lineNumber, hashValue);
}
const teeCallback = function (lineNumber, hash) {
Object.values(callbacks).forEach((c) => c(lineNumber, hash));
};
const fileContents = fs.readFileSync(filepath).toString();
hash(teeCallback, fileContents);
}
});
return JSON.stringify(sarif);
}
exports.addFingerprints = addFingerprints;
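For context on the hunks above: the fingerprint of a result is a Rabin-Karp style rolling hash over a fixed-size window of characters. Each step multiplies the running 64-bit hash by MOD, adds the incoming character, and subtracts the outgoing character weighted by MOD to the power of BLOCK_SIZE (the precomputed firstMod); hashes are then reported per line through the callback. Below is a deliberately small sketch of just that update rule, using bigint arithmetic and illustrative constants instead of the action's Long-based values.

const WINDOW = 4;              // the real code uses a much larger BLOCK_SIZE
const MULT = 31n;              // stands in for MOD
const MASK = (1n << 64n) - 1n; // emulate 64-bit wrap-around

function rollingHashesSketch(input: string): bigint[] {
  const ring: bigint[] = new Array(WINDOW).fill(0n);
  // Weight of the character about to fall out of the window: MULT ** WINDOW.
  let firstMod = 1n;
  for (let i = 0; i < WINDOW; i++) firstMod = (firstMod * MULT) & MASK;
  let hash = 0n;
  let index = 0;
  const out: bigint[] = [];
  for (const ch of input) {
    const current = BigInt(ch.charCodeAt(0));
    const begin = ring[index];
    ring[index] = current;
    // Multiply the running hash, add the new character, subtract the expired one.
    hash = (hash * MULT + current - firstMod * begin) & MASK;
    index = (index + 1) % WINDOW;
    out.push(hash);
  }
  return out;
}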

File diff suppressed because one or more lines are too long


@@ -1,4 +1,7 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
@@ -6,13 +9,10 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava"));
const fingerprints = __importStar(require("./fingerprints"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
@@ -108,9 +108,7 @@ ava_1.default("hash", (t) => {
});
function testResolveUriToFile(uri, index, artifactsURIs) {
const location = { uri, index };
const artifacts = artifactsURIs.map((artifactURI) => ({
location: { uri: artifactURI },
}));
const artifacts = artifactsURIs.map((uri) => ({ location: { uri } }));
return fingerprints.resolveUriToFile(location, artifacts, process.cwd(), logging_1.getRunnerLogger(true));
}
ava_1.default("resolveUriToFile", (t) => {
@@ -121,13 +119,13 @@ ava_1.default("resolveUriToFile", (t) => {
const cwd = process.cwd();
const filepath = __filename;
t.true(filepath.startsWith(`${cwd}/`));
const relativeFilepath = filepath.substring(cwd.length + 1);
const relativeFilepaht = filepath.substring(cwd.length + 1);
// Absolute paths are unmodified
t.is(testResolveUriToFile(filepath, undefined, []), filepath);
t.is(testResolveUriToFile(`file://${filepath}`, undefined, []), filepath);
// Relative paths are made absolute
t.is(testResolveUriToFile(relativeFilepath, undefined, []), filepath);
t.is(testResolveUriToFile(`file://${relativeFilepath}`, undefined, []), filepath);
t.is(testResolveUriToFile(relativeFilepaht, undefined, []), filepath);
t.is(testResolveUriToFile(`file://${relativeFilepaht}`, undefined, []), filepath);
// Absolute paths outside the src root are discarded
t.is(testResolveUriToFile("/src/foo/bar.js", undefined, []), undefined);
t.is(testResolveUriToFile("file:///src/foo/bar.js", undefined, []), undefined);
@@ -137,7 +135,7 @@ ava_1.default("resolveUriToFile", (t) => {
// Invalid URIs are discarded
t.is(testResolveUriToFile(1, undefined, []), undefined);
t.is(testResolveUriToFile(undefined, undefined, []), undefined);
// Non-existent files are discarded
// Non-existant files are discarded
t.is(testResolveUriToFile(`${filepath}2`, undefined, []), undefined);
// Index is resolved
t.is(testResolveUriToFile(undefined, 0, [filepath]), filepath);

File diff suppressed because one or more lines are too long

86
lib/init-action.js generated
View File

@@ -8,83 +8,50 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const util_1 = require("./util");
async function sendSuccessStatusReport(startedAt, config, toolsVersion) {
var _a;
const statusReportBase = await actionsUtil.createStatusReportBase("init", "success", startedAt);
const util = __importStar(require("./util"));
async function sendSuccessStatusReport(startedAt, config) {
const statusReportBase = await util.createStatusReportBase("init", "success", startedAt);
const languages = config.languages.join(",");
const workflowLanguages = actionsUtil.getOptionalInput("languages");
const workflowLanguages = core.getInput("languages", { required: false });
const paths = (config.originalUserInput.paths || []).join(",");
const pathsIgnore = (config.originalUserInput["paths-ignore"] || []).join(",");
const disableDefaultQueries = config.originalUserInput["disable-default-queries"]
? languages
: "";
const queries = [];
let queriesInput = (_a = actionsUtil.getOptionalInput("queries")) === null || _a === void 0 ? void 0 : _a.trim();
if (queriesInput === undefined || queriesInput.startsWith("+")) {
queries.push(...(config.originalUserInput.queries || []).map((q) => q.uses));
}
if (queriesInput !== undefined) {
queriesInput = queriesInput.startsWith("+")
? queriesInput.substr(1)
: queriesInput;
queries.push(...queriesInput.split(","));
}
const queries = (config.originalUserInput.queries || [])
.map((q) => q.uses)
.join(",");
const statusReport = {
...statusReportBase,
languages,
workflow_languages: workflowLanguages || "",
workflow_languages: workflowLanguages,
paths,
paths_ignore: pathsIgnore,
disable_default_queries: disableDefaultQueries,
queries: queries.join(","),
tools_input: actionsUtil.getOptionalInput("tools") || "",
tools_resolved_version: toolsVersion,
queries,
};
await actionsUtil.sendStatusReport(statusReport);
await util.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
const logger = logging_1.getActionsLogger();
let config;
let codeql;
let toolsVersion;
const apiDetails = {
auth: actionsUtil.getRequiredInput("token"),
externalRepoAuth: actionsUtil.getOptionalInput("external-repository-token"),
url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
};
const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
util_1.checkGitHubVersionInRange(gitHubVersion, "actions", logger);
try {
actionsUtil.prepareLocalRunEnvironment();
const workflowErrors = await actionsUtil.validateWorkflow();
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "starting", startedAt, workflowErrors)))) {
util.prepareLocalRunEnvironment();
if (!(await util.sendStatusReport(await util.createStatusReportBase("init", "starting", startedAt), true))) {
return;
}
const initCodeQLResult = await init_1.initCodeQL(actionsUtil.getOptionalInput("tools"), apiDetails, actionsUtil.getTemporaryDirectory(), "actions", gitHubVersion.type, logger);
codeql = initCodeQLResult.codeql;
toolsVersion = initCodeQLResult.toolsVersion;
config = await init_1.initConfig(actionsUtil.getOptionalInput("languages"), actionsUtil.getOptionalInput("queries"), actionsUtil.getOptionalInput("config-file"), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), actionsUtil.getTemporaryDirectory(), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), codeql, actionsUtil.getRequiredEnvParam("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, logger);
if (config.languages.includes(languages_1.Language.python) &&
actionsUtil.getRequiredInput("setup-python-dependencies") === "true") {
try {
await init_1.installPythonDeps(codeql, logger);
}
catch (err) {
logger.warning(`${err.message} You can call this action with 'setup-python-dependencies: false' to disable this process`);
}
}
codeql = await init_1.initCodeQL(util.getOptionalInput("tools"), util.getRequiredInput("token"), util.getRequiredEnvParam("GITHUB_SERVER_URL"), util.getRequiredEnvParam("RUNNER_TEMP"), util.getRequiredEnvParam("RUNNER_TOOL_CACHE"), "actions", logger);
config = await init_1.initConfig(util.getOptionalInput("languages"), util.getOptionalInput("queries"), util.getOptionalInput("config-file"), repository_1.parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")), util.getRequiredEnvParam("RUNNER_TEMP"), util.getRequiredEnvParam("RUNNER_TOOL_CACHE"), codeql, util.getRequiredEnvParam("GITHUB_WORKSPACE"), util.getRequiredInput("token"), util.getRequiredEnvParam("GITHUB_SERVER_URL"), logger);
}
catch (e) {
core.setFailed(e.message);
console.log(e);
await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "aborted", startedAt, e.message));
await util.sendStatusReport(await util.createStatusReportBase("init", "aborted", startedAt, e.message));
return;
}
try {
@@ -99,31 +66,22 @@ async function run() {
core.exportVariable("CODEQL_RAM", codeqlRam);
const tracerConfig = await init_1.runInit(codeql, config);
if (tracerConfig !== undefined) {
for (const [key, value] of Object.entries(tracerConfig.env)) {
core.exportVariable(key, value);
}
Object.entries(tracerConfig.env).forEach(([key, value]) => core.exportVariable(key, value));
if (process.platform === "win32") {
await init_1.injectWindowsTracer("Runner.Worker.exe", undefined, config, codeql, tracerConfig);
}
}
core.setOutput("codeql-path", config.codeQLCmd);
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "failure", startedAt, error.message, error.stack));
await util.sendStatusReport(await util.createStatusReportBase("init", "failure", startedAt, error.message, error.stack));
return;
}
await sendSuccessStatusReport(startedAt, config, toolsVersion);
await sendSuccessStatusReport(startedAt, config);
}
async function runWrapper() {
try {
await run();
}
catch (error) {
core.setFailed(`init action failed: ${error}`);
console.log(error);
}
}
void runWrapper();
run().catch((e) => {
core.setFailed(`init action failed: ${e}`);
console.log(e);
});
//# sourceMappingURL=init-action.js.map
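The init-action hunk above contrasts direct core.getInput calls with actionsUtil.getOptionalInput / getRequiredInput helpers. A hedged sketch of what such helpers could look like on top of @actions/core (the real actions-util module may differ; mapping the empty string to undefined is an assumption):

import * as core from "@actions/core";

// Required inputs: let @actions/core throw if the input is missing.
export function getRequiredInput(name: string): string {
  return core.getInput(name, { required: true });
}

// Optional inputs: treat the empty string that @actions/core returns for a
// missing input as undefined so callers can apply their own defaults.
export function getOptionalInput(name: string): string | undefined {
  const value = core.getInput(name);
  return value.length > 0 ? value : undefined;
}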

View File

@@ -1 +1 @@
{"version":3,"file":"init-action.js","sourceRoot":"","sources":["../src/init-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAG9C,iCAMgB;AAChB,2CAAuC;AACvC,uCAA6C;AAC7C,6CAAkD;AAClD,iCAAqE;AAsBrE,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,MAA0B,EAC1B,YAAoB;;IAEpB,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,MAAM,EACN,SAAS,EACT,SAAS,CACV,CAAC;IAEF,MAAM,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC7C,MAAM,iBAAiB,GAAG,WAAW,CAAC,gBAAgB,CAAC,WAAW,CAAC,CAAC;IACpE,MAAM,KAAK,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,KAAK,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/D,MAAM,WAAW,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CACvE,GAAG,CACJ,CAAC;IACF,MAAM,qBAAqB,GAAG,MAAM,CAAC,iBAAiB,CACpD,yBAAyB,CAC1B;QACC,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,EAAE,CAAC;IAEP,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,IAAI,YAAY,SAAG,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,0CAAE,IAAI,EAAE,CAAC;IACnE,IAAI,YAAY,KAAK,SAAS,IAAI,YAAY,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;QAC9D,OAAO,CAAC,IAAI,CACV,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,OAAO,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAC/D,CAAC;KACH;IACD,IAAI,YAAY,KAAK,SAAS,EAAE;QAC9B,YAAY,GAAG,YAAY,CAAC,UAAU,CAAC,GAAG,CAAC;YACzC,CAAC,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC,CAAC;YACxB,CAAC,CAAC,YAAY,CAAC;QACjB,OAAO,CAAC,IAAI,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC;KAC1C;IAED,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,SAAS;QACT,kBAAkB,EAAE,iBAAiB,IAAI,EAAE;QAC3C,KAAK;QACL,YAAY,EAAE,WAAW;QACzB,uBAAuB,EAAE,qBAAqB;QAC9C,OAAO,EAAE,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC;QAC1B,WAAW,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,IAAI,EAAE;QACxD,sBAAsB,EAAE,YAAY;KACrC,CAAC;IAEF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,IAAI,MAA0B,CAAC;IAC/B,IAAI,MAAc,CAAC;IACnB,IAAI,YAAoB,CAAC;IAEzB,MAAM,UAAU,GAAG;QACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;QAC3C,gBAAgB,EAAE,WAAW,CAAC,gBAAgB,CAAC,2BAA2B,CAAC;QAC3E,GAAG,EAAE,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC;KAC1D,CAAC;IAEF,MAAM,aAAa,GAAG,MAAM,uBAAgB,CAAC,UAAU,CAAC,CAAC;IACzD,gCAAyB,CAAC,aAAa,EAAE,SAAS,EAAE,MAAM,CAAC,CAAC;IAE5D,IAAI;QACF,WAAW,CAAC,0BAA0B,EAAE,CAAC;QAEzC,MAAM,cAAc,GAAG,MAAM,WAAW,CAAC,gBAAgB,EAAE,CAAC;QAE5D,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,MAAM,EACN,UAAU,EACV,SAAS,EACT,cAAc,CACf,CACF,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,gBAAgB,GAAG,MAAM,iBAAU,CACvC,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,UAAU,EACV,WAAW,CAAC,qBAAqB,EAAE,EACnC,SAAS,EACT,aAAa,CAAC,IAAI,EAClB,MAAM,CACP,CAAC;QACF,MAAM,GAAG,gBAAgB,CAAC,MAAM,CAAC;QACjC,YAAY,GAAG,gBAAgB,CAAC,YAAY,CAAC;QAE7C,MAAM,GAAG,MAAM,iBAAU,CACvB,WAAW,CAAC,gBAAgB,CAAC,WAAW,CAAC,EACzC,WAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,EACvC,WAAW,CAAC,gBAAgB,CAAC,aAAa,CAAC,EAC3C,+BAAkB,CAAC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACxE,WAAW,CAAC,qBAAqB,EAAE,EACnC,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EACpD,MAAM,EACN,WAAW,CAAC,mBAAmB,CAAC,kBAAkB,CAAC,EACnD,aAAa,EACb,UAAU,EACV,MAAM,CACP,CAAC;QAEF,IACE,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,oBAAQ,CAAC,MAAM,CAAC;YAC1C,WAAW,CAAC,gBAAgB,CAAC,2BAA2B,CAAC,KAAK,MAAM,EACpE;YACA,IAAI;gBACF,MAAM,wBAAiB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;aACzC;YAAC,OAAO,GAAG,EAAE;gBACZ,MAAM,CAAC,OAAO,CACZ,GAAG,GAAG,CAAC,OAAO,2FAA2F,CAC1G,CAAC;aACH;SACF;KACF;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;QAC1B,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACf,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,MAAM,EACN,SAAS,EACT,SAAS,EACT,CAAC,CAAC,OAAO,CACV,CACF,CAAC;QACF,OAAO;KACR;IAED,IAAI;QACF,mBAAmB;QACnB,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,SAAS,CA
AC,CAAC;QACvC,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;YACxC,IAAI,CAAC,OAAO,CACV,6GAA6G,CAC9G,CAAC;SACH;QAED,mGAAmG;QACnG,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,MAAM,CAAC;QACtD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE,SAAS,CAAC,CAAC;QAE7C,MAAM,YAAY,GAAG,MAAM,cAAO,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACnD,IAAI,YAAY,KAAK,SAAS,EAAE;YAC9B,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,EAAE;gBAC3D,IAAI,CAAC,cAAc,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;aACjC;YAED,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;gBAChC,MAAM,0BAAmB,CACvB,mBAAmB,EACnB,SAAS,EACT,MAAM,EACN,MAAM,EACN,YAAY,CACb,CAAC;aACH;SACF;QAED,IAAI,CAAC,SAAS,CAAC,aAAa,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC;KACjD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,MAAM,EACN,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;IACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,MAAM,EAAE,YAAY,CAAC,CAAC;AACjE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,uBAAuB,KAAK,EAAE,CAAC,CAAC;QAC/C,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"init-action.js","sourceRoot":"","sources":["../src/init-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAItC,iCAA8E;AAC9E,uCAA6C;AAC7C,6CAAkD;AAClD,6CAA+B;AAkB/B,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,MAA0B;IAE1B,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,MAAM,EACN,SAAS,EACT,SAAS,CACV,CAAC;IAEF,MAAM,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC7C,MAAM,iBAAiB,GAAG,IAAI,CAAC,QAAQ,CAAC,WAAW,EAAE,EAAE,QAAQ,EAAE,KAAK,EAAE,CAAC,CAAC;IAC1E,MAAM,KAAK,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,KAAK,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/D,MAAM,WAAW,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,IAAI,CACvE,GAAG,CACJ,CAAC;IACF,MAAM,qBAAqB,GAAG,MAAM,CAAC,iBAAiB,CACpD,yBAAyB,CAC1B;QACC,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,EAAE,CAAC;IACP,MAAM,OAAO,GAAG,CAAC,MAAM,CAAC,iBAAiB,CAAC,OAAO,IAAI,EAAE,CAAC;SACrD,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC;SAClB,IAAI,CAAC,GAAG,CAAC,CAAC;IAEb,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,SAAS;QACT,kBAAkB,EAAE,iBAAiB;QACrC,KAAK;QACL,YAAY,EAAE,WAAW;QACzB,uBAAuB,EAAE,qBAAqB;QAC9C,OAAO;KACR,CAAC;IAEF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,0BAAgB,EAAE,CAAC;IAClC,IAAI,MAA0B,CAAC;IAC/B,IAAI,MAAc,CAAC;IAEnB,IAAI;QACF,IAAI,CAAC,0BAA0B,EAAE,CAAC;QAClC,IACE,CAAC,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAC3B,MAAM,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,UAAU,EAAE,SAAS,CAAC,EAChE,IAAI,CACL,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,GAAG,MAAM,iBAAU,CACvB,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC,EAC9B,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC,EAC9B,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,EACvC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,SAAS,EACT,MAAM,CACP,CAAC;QACF,MAAM,GAAG,MAAM,iBAAU,CACvB,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC,EAClC,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC,EAChC,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC,EACpC,+BAAkB,CAAC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACjE,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAC,EACvC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,MAAM,EACN,IAAI,CAAC,mBAAmB,CAAC,kBAAkB,CAAC,EAC5C,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC,EAC9B,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,MAAM,CACP,CAAC;KACH;IAAC,OAAO,CAAC,EAAE;QACV,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;QAC1B,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACf,MAAM,IAAI,CAAC,gBAAgB,CACzB,MAAM,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,EAAE,CAAC,CAAC,OAAO,CAAC,CAC3E,CAAC;QACF,OAAO;KACR;IAED,IAAI;QACF,mBAAmB;QACnB,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;QACvC,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;YACxC,IAAI,CAAC,OAAO,CACV,6GAA6G,CAC9G,CAAC;SACH;QAED,mGAAmG;QACnG,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,MAAM,CAAC;QACtD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE,SAAS,CAAC,CAAC;QAE7C,MAAM,YAAY,GAAG,MAAM,cAAO,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACnD,IAAI,YAAY,KAAK,SAAS,EAAE;YAC9B,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CACxD,IAAI,CAAC,cAAc,CAAC,GAAG,EAAE,KAAK,CAAC,CAChC,CAAC;YAEF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;gBAChC,MAAM,0BAAmB,CACvB,mBAAmB,EACnB,SAAS,EACT,MAAM,EACN,MAAM,EACN,YAAY,CACb,CAAC;aACH;SACF;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,IAAI,CAAC,gBAAgB,CACzB,MAAM,IAAI,CAAC,sBAAsB,CAC/B,MAAM,EACN,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;IACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;AACnD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;IAChB,IAAI,CAAC,SAAS,CAAC,uBAAuB,CAAC,EAAE,CAAC,CAAC;IAC3C,OAA
O,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

64
lib/init.js generated
View File

@@ -7,26 +7,25 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
const analysisPaths = __importStar(require("./analysis-paths"));
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const tracer_config_1 = require("./tracer-config");
const util = __importStar(require("./util"));
async function initCodeQL(codeqlURL, apiDetails, tempDir, mode, variant, logger) {
async function initCodeQL(codeqlURL, githubAuth, githubUrl, tempDir, toolsDir, mode, logger) {
logger.startGroup("Setup CodeQL tools");
const { codeql, toolsVersion } = await codeql_1.setupCodeQL(codeqlURL, apiDetails, tempDir, mode, variant, logger);
const codeql = await codeql_1.setupCodeQL(codeqlURL, githubAuth, githubUrl, tempDir, toolsDir, mode, logger);
await codeql.printVersion();
logger.endGroup();
return { codeql, toolsVersion };
return codeql;
}
exports.initCodeQL = initCodeQL;
async function initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger) {
async function initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger) {
logger.startGroup("Load language configuration");
const config = await configUtils.initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, gitHubVersion, apiDetails, logger);
const config = await configUtils.initConfig(languagesInput, queriesInput, configFile, repository, tempDir, toolCacheDir, codeQL, checkoutPath, githubAuth, githubUrl, logger);
analysisPaths.printPathFiltersWarning(config, logger);
logger.endGroup();
return config;
@@ -103,10 +102,6 @@ async function injectWindowsTracer(processName, processLevel, config, codeql, tr
Write-Host "Found Runner.Worker.exe process which means we are running on GitHub Actions"
Write-Host "Aborting search early and using process: $p"
Break
} elseif ($p[0].Name -eq "Agent.Worker.exe") {
Write-Host "Found Agent.Worker.exe process which means we are running on Azure Pipelines"
Write-Host "Aborting search early and using process: $p"
Break
} else {
$id = $p[0].ParentProcessId
}
@@ -117,7 +112,7 @@ async function injectWindowsTracer(processName, processLevel, config, codeql, tr
}
const injectTracerPath = path.join(config.tempDir, "inject-tracer.ps1");
fs.writeFileSync(injectTracerPath, script);
await new toolrunner.ToolRunner(await safeWhich.safeWhich("powershell"), [
await new toolrunnner.ToolRunner("powershell", [
"-ExecutionPolicy",
"Bypass",
"-file",
@@ -126,49 +121,4 @@ async function injectWindowsTracer(processName, processLevel, config, codeql, tr
], { env: { ODASA_TRACER_CONFIGURATION: tracerConfig.spec } }).exec();
}
exports.injectWindowsTracer = injectWindowsTracer;
async function installPythonDeps(codeql, logger) {
logger.startGroup("Setup Python dependencies");
const scriptsFolder = path.resolve(__dirname, "../python-setup");
// Setup tools on the GitHub hosted runners
if (process.env["ImageOS"] !== undefined) {
try {
if (process.platform === "win32") {
await new toolrunner.ToolRunner(await safeWhich.safeWhich("powershell"), [path.join(scriptsFolder, "install_tools.ps1")]).exec();
}
else {
await new toolrunner.ToolRunner(path.join(scriptsFolder, "install_tools.sh")).exec();
}
}
catch (e) {
// This script tries to install some needed tools in the runner. It should not fail, but if it does
// we just abort the process without failing the action
logger.endGroup();
logger.warning("Unable to download and extract the tools needed for installing the python dependencies. You can call this action with 'setup-python-dependencies: false' to disable this process.");
return;
}
}
// Install dependencies
try {
const script = "auto_install_packages.py";
if (process.platform === "win32") {
await new toolrunner.ToolRunner(await safeWhich.safeWhich("py"), [
"-3",
path.join(scriptsFolder, script),
path.dirname(codeql.getPath()),
]).exec();
}
else {
await new toolrunner.ToolRunner(path.join(scriptsFolder, script), [
path.dirname(codeql.getPath()),
]).exec();
}
}
catch (e) {
logger.endGroup();
logger.warning("We were unable to install your python dependencies. You can call this action with 'setup-python-dependencies: false' to disable this process.");
return;
}
logger.endGroup();
}
exports.installPythonDeps = installPythonDeps;
//# sourceMappingURL=init.js.map
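One side of the init hunk above runs helper scripts through ToolRunner, resolving the interpreter with safeWhich on Windows instead of passing a bare command name. A hedged sketch of that cross-platform invocation pattern; the script path and function name are placeholders:

import * as toolrunner from "@actions/exec/lib/toolrunner";
import * as safeWhich from "@chrisgavin/safe-which";

async function runSetupScript(scriptPath: string): Promise<number> {
  if (process.platform === "win32") {
    // Resolve powershell from PATH before handing it to ToolRunner.
    const powershell = await safeWhich.safeWhich("powershell");
    return await new toolrunner.ToolRunner(powershell, [
      "-ExecutionPolicy",
      "Bypass",
      "-file",
      scriptPath,
    ]).exec();
  }
  // On Linux and macOS the script is assumed to be directly executable.
  return await new toolrunner.ToolRunner(scriptPath).exec();
}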

View File

@@ -1 +1 @@
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA+C;AAC/C,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAExB,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAA4B,EAC5B,OAAe,EACf,IAAe,EACf,OAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM,oBAAW,CAChD,SAAS,EACT,UAAU,EACV,OAAO,EACP,IAAI,EACJ,OAAO,EACP,MAAM,CACP,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,CAAC;AAClC,CAAC;AApBD,gCAoBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,YAAoB,EACpB,aAAiC,EACjC,UAAoC,EACpC,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,YAAY,EACZ,aAAa,EACb,UAAU,EACV,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA9BD,gCA8BC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B;IAE1B,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC;IAElC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE9E,sEAAsE;IACtE,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,yBAAyB;QACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,EACR,UAAU,CACX,CAAC;KACH;IAED,OAAO,MAAM,uCAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAnBD,0BAmBC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;;;;;gDAqBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AA5FD,kDA4FC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,2CAA2C;IAC3C,IAAI,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,KAAK,SAAS,EAAE;QACxC,IAAI;YACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;gBAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EACvC,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC,CAAC,CAChD,CAAC,IAAI,EAAE,CAAC;aACV;iBAAM;gBACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;aACV;SACF;QAAC,OAAO,CAAC,EAAE;YACV,mGAAmG;YACnG,uDAAuD;YACvD,MAAM,CAAC,QAAQ,EAAE,CAAC;YAClB,MAAM,CAAC,OAAO,CACZ,mLAAmL,CACpL,CAAC;YACF,OAAO;SACR;KACF;IAED,uBAAuB;IACvB,IAAI;QACF,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CA
AC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,EAAE;gBAChE,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,+IAA+I,CAChJ,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAnDD,8CAmDC"}
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;AAAA,0EAA4D;AAC5D,uCAAyB;AACzB,2CAA6B;AAE7B,gEAAkD;AAClD,qCAA+C;AAC/C,4DAA8C;AAG9C,mDAAwE;AACxE,6CAA+B;AAExB,KAAK,UAAU,UAAU,CAC9B,SAA6B,EAC7B,UAAkB,EAClB,SAAiB,EACjB,OAAe,EACf,QAAgB,EAChB,IAAe,EACf,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,MAAM,GAAG,MAAM,oBAAW,CAC9B,SAAS,EACT,UAAU,EACV,SAAS,EACT,OAAO,EACP,QAAQ,EACR,IAAI,EACJ,MAAM,CACP,CAAC;IACF,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AAtBD,gCAsBC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,UAAyB,EACzB,OAAe,EACf,YAAoB,EACpB,MAAc,EACd,YAAoB,EACpB,UAAkB,EAClB,SAAiB,EACjB,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,UAAU,EACV,OAAO,EACP,YAAY,EACZ,MAAM,EACN,YAAY,EACZ,UAAU,EACV,SAAS,EACT,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA9BD,gCA8BC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B;IAE1B,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC;IAElC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE9E,sEAAsE;IACtE,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,yBAAyB;QACzB,MAAM,MAAM,CAAC,YAAY,CACvB,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,EACpD,QAAQ,EACR,UAAU,CACX,CAAC;KACH;IAED,OAAO,MAAM,uCAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;AACvD,CAAC;AAnBD,0BAmBC;AAED,sEAAsE;AACtE,4EAA4E;AAC5E,4EAA4E;AAC5E,6EAA6E;AAC7E,+CAA+C;AACxC,KAAK,UAAU,mBAAmB,CACvC,WAA+B,EAC/B,YAAgC,EAChC,MAA0B,EAC1B,MAAc,EACd,YAA0B;IAE1B,IAAI,MAAc,CAAC;IACnB,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,GAAG;;;;;;;;;;;;uCAY0B,WAAW;;8BAEpB,WAAW;;;;;;;;gDAQO,CAAC;KAC9C;SAAM;QACL,oEAAoE;QACpE,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,oFAAoF;QACpF,6CAA6C;QAC7C,YAAY,GAAG,YAAY,IAAI,CAAC,CAAC;QACjC,MAAM,GAAG;;;;;;;;4BAQe,YAAY;;;;;;;;;;;;;;;;;gDAiBQ,CAAC;KAC9C;IAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,mBAAmB,CAAC,CAAC;IACxE,EAAE,CAAC,aAAa,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC;IAE3C,MAAM,IAAI,WAAW,CAAC,UAAU,CAC9B,YAAY,EACZ;QACE,kBAAkB;QAClB,QAAQ;QACR,OAAO;QACP,gBAAgB;QAChB,IAAI,CAAC,OAAO,CACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,EAC9B,OAAO,EACP,OAAO,EACP,YAAY,CACb;KACF,EACD,EAAE,GAAG,EAAE,EAAE,0BAA0B,EAAE,YAAY,CAAC,IAAI,EAAE,EAAE,CAC3D,CAAC,IAAI,EAAE,CAAC;AACX,CAAC;AAxFD,kDAwFC"}

4
lib/languages.js generated
View File

@@ -33,9 +33,7 @@ function parseLanguage(language) {
}
exports.parseLanguage = parseLanguage;
function isTracedLanguage(language) {
return (["cpp", "java", "csharp"].includes(language) ||
(process.env["CODEQL_EXTRACTOR_GO_BUILD_TRACING"] === "on" &&
language === Language.go));
return ["cpp", "java", "csharp"].includes(language);
}
exports.isTracedLanguage = isTracedLanguage;
function isScannedLanguage(language) {

View File

@@ -1 +1 @@
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;AAAA,wCAAwC;AACxC,IAAY,QAOX;AAPD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;AACnB,CAAC,EAPW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAOnB;AAED,iCAAiC;AACjC,MAAM,gBAAgB,GAAiC;IACrD,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,gGAAgG;AAChG,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;IAElC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,yBAAyB;IACzB,IAAI,QAAQ,IAAI,gBAAgB,EAAE;QAChC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,CAAC;KACnC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAfD,sCAeC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO,CACL,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC;QAC5C,CAAC,OAAO,CAAC,GAAG,CAAC,mCAAmC,CAAC,KAAK,IAAI;YACxD,QAAQ,KAAK,QAAQ,CAAC,EAAE,CAAC,CAC5B,CAAC;AACJ,CAAC;AAND,4CAMC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;AAAA,wCAAwC;AACxC,IAAY,QAOX;AAPD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;AACnB,CAAC,EAPW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAOnB;AAED,iCAAiC;AACjC,MAAM,gBAAgB,GAAiC;IACrD,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAEF,gGAAgG;AAChG,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;IAElC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,yBAAyB;IACzB,IAAI,QAAQ,IAAI,gBAAgB,EAAE;QAChC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,CAAC;KACnC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAfD,sCAeC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACtD,CAAC;AAFD,4CAEC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}

2
lib/languages.test.js generated
View File

@@ -7,7 +7,7 @@ const ava_1 = __importDefault(require("ava"));
const languages_1 = require("./languages");
const testing_utils_1 = require("./testing-utils");
testing_utils_1.setupTests(ava_1.default);
ava_1.default("parseLanguage", async (t) => {
ava_1.default("parseLangauge", async (t) => {
// Exact matches
t.deepEqual(languages_1.parseLanguage("csharp"), languages_1.Language.csharp);
t.deepEqual(languages_1.parseLanguage("cpp"), languages_1.Language.cpp);

79
lib/runner.js generated
View File

@@ -7,10 +7,10 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const commander_1 = require("commander");
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const commander_1 = require("commander");
const analyze_1 = require("./analyze");
const autobuild_1 = require("./autobuild");
const codeql_1 = require("./codeql");
@@ -23,6 +23,24 @@ const upload_lib = __importStar(require("./upload-lib"));
const util_1 = require("./util");
const program = new commander_1.Command();
program.version("0.0.1");
function parseGithubUrl(inputUrl) {
try {
const url = new URL(inputUrl);
// If we detect this is trying to be to github.com
// then return with a fixed canonical URL.
if (url.hostname === "github.com" || url.hostname === "api.github.com") {
return "https://github.com";
}
// Remove the API prefix if it's present
if (url.pathname.indexOf("/api/v3") !== -1) {
url.pathname = url.pathname.substring(0, url.pathname.indexOf("/api/v3"));
}
return url.toString();
}
catch (e) {
throw new Error(`"${inputUrl}" is not a valid URL`);
}
}
function getTempDir(userInput) {
const tempDir = path.join(userInput || process.cwd(), "codeql-runner");
if (!fs.existsSync(tempDir)) {
@@ -43,9 +61,7 @@ function importTracerEnvironment(config) {
if (!("ODASA_TRACER_CONFIGURATION" in process.env)) {
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
const env = JSON.parse(fs.readFileSync(jsonEnvFile).toString("utf-8"));
for (const key of Object.keys(env)) {
process.env[key] = env[key];
}
Object.keys(env).forEach((key) => (process.env[key] = env[key]));
}
}
// Allow the user to specify refs in full refs/heads/branch format
@@ -82,8 +98,7 @@ program
.description("Initializes CodeQL")
.requiredOption("--repository <repository>", "Repository name. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.option("--github-auth <auth>", "GitHub Apps token or personal access token. This option is insecure and deprecated, please use `--github-auth-stdin` instead.")
.option("--github-auth-stdin", "Read GitHub Apps token or personal access token from stdin.")
.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
.option("--languages <languages>", "Comma-separated list of languages to analyze. Otherwise detects and analyzes all supported languages from the repo.")
.option("--queries <queries>", "Comma-separated list of additional queries to run. This overrides the same setting in a configuration file.")
.option("--config-file <file>", "Path to config file.")
@@ -100,27 +115,18 @@ program
try {
const tempDir = getTempDir(cmd.tempDir);
const toolsDir = getToolsDir(cmd.toolsDir);
util_1.setupActionsVars(tempDir, toolsDir);
// Wipe the temp dir
logger.info(`Cleaning temp directory ${tempDir}`);
fs.rmdirSync(tempDir, { recursive: true });
fs.mkdirSync(tempDir, { recursive: true });
const auth = await util_1.getGitHubAuth(logger, cmd.githubAuth, cmd.githubAuthStdin);
const apiDetails = {
auth,
externalRepoAuth: auth,
url: util_1.parseGitHubUrl(cmd.githubUrl),
};
const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
util_1.checkGitHubVersionInRange(gitHubVersion, "runner", logger);
let codeql;
if (cmd.codeqlPath !== undefined) {
codeql = codeql_1.getCodeQL(cmd.codeqlPath);
}
else {
codeql = (await init_1.initCodeQL(undefined, apiDetails, tempDir, "runner", gitHubVersion.type, logger)).codeql;
codeql = await init_1.initCodeQL(undefined, cmd.githubAuth, parseGithubUrl(cmd.githubUrl), tempDir, toolsDir, "runner", logger);
}
const config = await init_1.initConfig(cmd.languages, cmd.queries, cmd.configFile, repository_1.parseRepositoryNwo(cmd.repository), tempDir, toolsDir, codeql, cmd.checkoutPath || process.cwd(), gitHubVersion, apiDetails, logger);
const config = await init_1.initConfig(cmd.languages, cmd.queries, cmd.configFile, repository_1.parseRepositoryNwo(cmd.repository), tempDir, toolsDir, codeql, cmd.checkoutPath || process.cwd(), cmd.githubAuth, parseGithubUrl(cmd.githubUrl), logger);
const tracerConfig = await init_1.runInit(codeql, config);
if (tracerConfig === undefined) {
return;
@@ -128,7 +134,7 @@ program
if (process.platform === "win32") {
await init_1.injectWindowsTracer(parseTraceProcessName(), parseTraceProcessLevel(), config, codeql, tracerConfig);
}
// Always output a json file of the env that can be consumed programmatically
// Always output a json file of the env that can be consumed programatically
const jsonEnvFile = path.join(config.tempDir, codeqlEnvJsonFilename);
fs.writeFileSync(jsonEnvFile, JSON.stringify(tracerConfig.env));
if (process.platform === "win32") {
@@ -143,7 +149,7 @@ program
.join("\n");
fs.writeFileSync(powershellEnvFile, powershellEnvFileContents);
logger.info(`\nCodeQL environment output to "${jsonEnvFile}", "${batEnvFile}" and "${powershellEnvFile}". ` +
`Please export these variables to future processes so that CodeQL can monitor the build. ` +
`Please export these variables to future processes so the build can be traced. ` +
`If using cmd/batch run "call ${batEnvFile}" ` +
`or if using PowerShell run "cat ${powershellEnvFile} | Invoke-Expression".`);
}
@@ -152,11 +158,11 @@ program
const shEnvFile = path.join(config.tempDir, "codeql-env.sh");
const shEnvFileContents = Object.entries(tracerConfig.env)
// Some vars contain ${LIB} that we do not want to be expanded when executing this script
.map(([key, value]) => `export ${key}="${value.replace(/\$/g, "\\$")}"`)
.map(([key, value]) => `export ${key}="${value.replace("$", "\\$")}"`)
.join("\n");
fs.writeFileSync(shEnvFile, shEnvFileContents);
logger.info(`\nCodeQL environment output to "${jsonEnvFile}" and "${shEnvFile}". ` +
`Please export these variables to future processes so that CodeQL can monitor the build, ` +
`Please export these variables to future processes so the build can be traced, ` +
`for example by running ". ${shEnvFile}".`);
}
}
@@ -180,7 +186,6 @@ program
throw new Error("Config file could not be found at expected location. " +
"Was the 'init' command run with the same '--temp-dir' argument as this command.");
}
util_1.setupActionsVars(config.tempDir, config.toolCacheDir);
importTracerEnvironment(config);
let language = undefined;
if (cmd.language !== undefined) {
@@ -210,8 +215,7 @@ program
.requiredOption("--commit <commit>", "SHA of commit that was analyzed. (Required)")
.requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.option("--github-auth <auth>", "GitHub Apps token or personal access token. This option is insecure and deprecated, please use `--github-auth-stdin` instead.")
.option("--github-auth-stdin", "Read GitHub Apps token or personal access token from stdin.")
.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
.option("--no-upload", "Do not upload results after analysis.")
.option("--output-dir <dir>", "Directory to output SARIF files to. Default is in the temp directory.")
@@ -224,24 +228,14 @@ program
.action(async (cmd) => {
const logger = logging_1.getRunnerLogger(cmd.debug);
try {
const tempDir = getTempDir(cmd.tempDir);
const outputDir = cmd.outputDir || path.join(tempDir, "codeql-sarif");
const config = await config_utils_1.getConfig(getTempDir(cmd.tempDir), logger);
if (config === undefined) {
throw new Error("Config file could not be found at expected location. " +
"Was the 'init' command run with the same '--temp-dir' argument as this command.");
}
util_1.setupActionsVars(config.tempDir, config.toolCacheDir);
const auth = await util_1.getGitHubAuth(logger, cmd.githubAuth, cmd.githubAuthStdin);
const apiDetails = {
auth,
url: util_1.parseGitHubUrl(cmd.githubUrl),
};
const outputDir = cmd.outputDir || path.join(config.tempDir, "codeql-sarif");
await analyze_1.runAnalyze(outputDir, util_1.getMemoryFlag(cmd.ram), util_1.getAddSnippetsFlag(cmd.addSnippets), util_1.getThreadsFlag(cmd.threads, logger), config, logger);
if (!cmd.upload) {
logger.info("Not uploading results");
return;
}
await upload_lib.uploadFromRunner(outputDir, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.checkoutPath || process.cwd(), config.gitHubVersion, apiDetails, logger);
await analyze_1.runAnalyze(repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, cmd.githubAuth, parseGithubUrl(cmd.githubUrl), cmd.upload, "runner", outputDir, util_1.getMemoryFlag(cmd.ram), util_1.getAddSnippetsFlag(cmd.addSnippets), util_1.getThreadsFlag(cmd.threads, logger), config, logger);
}
catch (e) {
logger.error("Analyze failed");
@@ -257,20 +251,13 @@ program
.requiredOption("--commit <commit>", "SHA of commit that was analyzed. (Required)")
.requiredOption("--ref <ref>", "Name of ref that was analyzed. (Required)")
.requiredOption("--github-url <url>", "URL of GitHub instance. (Required)")
.option("--github-auth <auth>", "GitHub Apps token or personal access token. This option is insecure and deprecated, please use `--github-auth-stdin` instead.")
.option("--github-auth-stdin", "Read GitHub Apps token or personal access token from stdin.")
.requiredOption("--github-auth <auth>", "GitHub Apps token or personal access token. (Required)")
.option("--checkout-path <path>", "Checkout path. Default is the current working directory.")
.option("--debug", "Print more verbose output", false)
.action(async (cmd) => {
const logger = logging_1.getRunnerLogger(cmd.debug);
const auth = await util_1.getGitHubAuth(logger, cmd.githubAuth, cmd.githubAuthStdin);
const apiDetails = {
auth,
url: util_1.parseGitHubUrl(cmd.githubUrl),
};
try {
const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
await upload_lib.uploadFromRunner(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), cmd.checkoutPath || process.cwd(), gitHubVersion, apiDetails, logger);
await upload_lib.upload(cmd.sarifFile, repository_1.parseRepositoryNwo(cmd.repository), cmd.commit, parseRef(cmd.ref), undefined, undefined, undefined, cmd.checkoutPath || process.cwd(), undefined, cmd.githubAuth, parseGithubUrl(cmd.githubUrl), "runner", logger);
}
catch (e) {
logger.error("Upload failed");

File diff suppressed because one or more lines are too long

View File

@@ -7,8 +7,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner"));
/**
* Wrapper for toolrunner.Toolrunner which checks for specific return code and/or regex matches in console output.
* Output will be streamed to the live console as well as captured for subsequent processing.
@@ -51,7 +50,7 @@ async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
// we capture the original return code or error so that if no match is found we can duplicate the behavior
let returnState;
try {
returnState = await new toolrunner.ToolRunner(await safeWhich.safeWhich(commandLine), args, {
returnState = await new toolrunnner.ToolRunner(commandLine, args, {
...options,
listeners,
ignoreReturnCode: true,
@@ -76,7 +75,7 @@ async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
return returnState;
}
else {
throw new Error(`The process '${commandLine}' failed with exit code ${returnState}`);
throw new Error(`The process \'${commandLine}\' failed with exit code ${returnState}`);
}
}
else {

View File

@@ -1 +1 @@
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AAIpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,aAAA,OAAO,0CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,aAAA,OAAO,0CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CAC3C,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI;SACvB,CACF,CAAC,IAAI,EAAE,CAAC;KACV;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,CAAC;KACjB;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW,WAChC,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,EAAC,WACjC,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,EAAC,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,UAAI,OAAO,0CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,WAAW,EAAE,CACpE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AAzED,wDAyEC"}
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;AACA,0EAA4D;AAI5D;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,aAAA,OAAO,0CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,aAAA,OAAO,0CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;iBAAM;gBACL,4FAA4F;gBAC5F,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aAC5B;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,WAAW,CAAC,UAAU,CAAC,WAAW,EAAE,IAAI,EAAE;YAChE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI;SACvB,CAAC,CAAC,IAAI,EAAE,CAAC;KACX;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,CAAC;KACjB;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW,WAChC,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,EAAC,WACjC,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,EAAC,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,UAAI,OAAO,0CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,iBAAiB,WAAW,4BAA4B,WAAW,EAAE,CACtE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AArED,wDAqEC"}

12
lib/tracer-config.js generated
View File

@@ -13,9 +13,13 @@ const languages_1 = require("./languages");
const util = __importStar(require("./util"));
const CRITICAL_TRACER_VARS = new Set([
"SEMMLE_PRELOAD_libtrace",
,
"SEMMLE_RUNNER",
,
"SEMMLE_COPY_EXECUTABLES_ROOT",
,
"SEMMLE_DEPTRACE_SOCKET",
,
"SEMMLE_JAVA_TOOL_OPTIONS",
]);
async function getTracerConfigForLanguage(codeql, config, language) {
@@ -109,7 +113,7 @@ function concatTracerConfigs(tracerConfigs, config) {
for (const e of Object.entries(env)) {
const key = e[0];
const value = e[1];
const lineBuffer = Buffer.from(`${key}=${value}\0`, "utf8");
const lineBuffer = new Buffer(`${key}=${value}\0`, "utf8");
const sizeBuffer = Buffer.alloc(4);
sizeBuffer.writeInt32LE(lineBuffer.length, 0);
buffer = Buffer.concat([buffer, sizeBuffer, lineBuffer]);
@@ -141,12 +145,6 @@ async function getCombinedTracerConfig(config, codeql) {
else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
}
// On macos it's necessary to prefix the build command with the runner executable
// on order to trace when System Integrity Protection is enabled.
// The executable also exists and works for other platforms so we output this env
// var with a path to the runner regardless so it's always available.
const runnerExeName = process.platform === "win32" ? "runner.exe" : "runner";
mainTracerConfig.env["CODEQL_RUNNER"] = path.join(mainTracerConfig.env["CODEQL_DIST"], "tools", mainTracerConfig.env["CODEQL_PLATFORM"], runnerExeName);
return mainTracerConfig;
}
exports.getCombinedTracerConfig = getCombinedTracerConfig;
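The tracer-config hunk above serialises environment variables as length-prefixed, NUL-terminated "KEY=value" records, and contrasts Buffer.from with the deprecated new Buffer constructor. A hedged sketch of just that record encoding (the surrounding header and spec-file handling are omitted):

function encodeTracerEnv(env: { [key: string]: string }): Buffer {
  let buffer = Buffer.alloc(0);
  for (const [key, value] of Object.entries(env)) {
    // Each record is a 4-byte little-endian length followed by the
    // NUL-terminated KEY=value string.
    const lineBuffer = Buffer.from(`${key}=${value}\0`, "utf8");
    const sizeBuffer = Buffer.alloc(4);
    sizeBuffer.writeInt32LE(lineBuffer.length, 0);
    buffer = Buffer.concat([buffer, sizeBuffer, lineBuffer]);
  }
  return buffer;
}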

File diff suppressed because one or more lines are too long

View File

@@ -1,4 +1,7 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
@@ -6,13 +9,10 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava"));
const codeql_1 = require("./codeql");
const languages_1 = require("./languages");
const testing_utils_1 = require("./testing-utils");
@@ -29,7 +29,6 @@ function getTestConfig(tmpDir) {
tempDir: tmpDir,
toolCacheDir: tmpDir,
codeQLCmd: "",
gitHubVersion: { type: util.GitHubVariant.DOTCOM },
};
}
// A very minimal setup
@@ -59,6 +58,7 @@ ava_1.default("getTracerConfigForLanguage - existing / critical vars", async (t)
process.env["SEMMLE_COPY_EXECUTABLES_ROOT"] = "abc";
process.env["SEMMLE_DEPTRACE_SOCKET"] = "abc";
process.env["SEMMLE_JAVA_TOOL_OPTIONS"] = "abc";
process.env["SEMMLE_DEPTRACE_SOCKET"] = "abc";
process.env["CODEQL_VAR"] = "abc";
// Now CodeQL returns all these variables, and one more, with different values
const codeQL = codeql_1.setCodeQL({
@@ -238,7 +238,6 @@ ava_1.default("getCombinedTracerConfig - return undefined when no languages are
async getTracerEnv() {
return {
ODASA_TRACER_CONFIGURATION: "abc",
CODEQL_DIST: "/",
foo: "bar",
};
},
@@ -251,28 +250,17 @@ ava_1.default("getCombinedTracerConfig - valid spec file", async (t) => {
const config = getTestConfig(tmpDir);
const spec = path.join(tmpDir, "spec");
fs.writeFileSync(spec, "foo.log\n2\nabc\ndef");
const bundlePath = path.join(tmpDir, "bundle");
const codeqlPlatform = process.platform === "win32"
? "win64"
: process.platform === "darwin"
? "osx64"
: "linux64";
const codeQL = codeql_1.setCodeQL({
async getTracerEnv() {
return {
ODASA_TRACER_CONFIGURATION: spec,
CODEQL_DIST: bundlePath,
CODEQL_PLATFORM: codeqlPlatform,
foo: "bar",
};
},
});
const result = await tracer_config_1.getCombinedTracerConfig(config, codeQL);
t.notDeepEqual(result, undefined);
const expectedEnv = {
foo: "bar",
CODEQL_DIST: bundlePath,
CODEQL_PLATFORM: codeqlPlatform,
ODASA_TRACER_CONFIGURATION: result.spec,
};
if (process.platform === "darwin") {
@@ -281,15 +269,6 @@ ava_1.default("getCombinedTracerConfig - valid spec file", async (t) => {
else if (process.platform !== "win32") {
expectedEnv["LD_PRELOAD"] = path.join(path.dirname(codeQL.getPath()), "tools", "linux64", "${LIB}trace.so");
}
if (process.platform === "win32") {
expectedEnv["CODEQL_RUNNER"] = path.join(bundlePath, "tools/win64/runner.exe");
}
else if (process.platform === "darwin") {
expectedEnv["CODEQL_RUNNER"] = path.join(bundlePath, "tools/osx64/runner");
}
else {
expectedEnv["CODEQL_RUNNER"] = path.join(bundlePath, "tools/linux64/runner");
}
t.deepEqual(result, {
spec: path.join(tmpDir, "compound-spec"),
env: expectedEnv,

File diff suppressed because one or more lines are too long

199
lib/upload-lib.js generated
View File

@@ -10,17 +10,14 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const zlib_1 = __importDefault(require("zlib"));
const core = __importStar(require("@actions/core"));
const file_url_1 = __importDefault(require("file-url"));
const fs = __importStar(require("fs"));
const jsonschema = __importStar(require("jsonschema"));
const semver = __importStar(require("semver"));
const actionsUtil = __importStar(require("./actions-util"));
const path = __importStar(require("path"));
const zlib_1 = __importDefault(require("zlib"));
const api = __importStar(require("./api-client"));
const fingerprints = __importStar(require("./fingerprints"));
const repository_1 = require("./repository");
const sharedEnv = __importStar(require("./shared-environment"));
const util = __importStar(require("./util"));
// Takes a list of paths to sarif files and combines them together,
@@ -37,7 +34,7 @@ function combineSarifFiles(sarifFiles) {
combinedSarif.version = sarifObject.version;
}
else if (combinedSarif.version !== sarifObject.version) {
throw new Error(`Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`);
throw `Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`;
}
combinedSarif.runs.push(...sarifObject.runs);
}
@@ -46,91 +43,85 @@ function combineSarifFiles(sarifFiles) {
exports.combineSarifFiles = combineSarifFiles;
// Upload the given payload.
// If the request fails then this will retry a small number of times.
async function uploadPayload(payload, repositoryNwo, apiDetails, mode, logger) {
async function uploadPayload(payload, repositoryNwo, githubAuth, githubUrl, mode, logger) {
logger.info("Uploading results");
// If in test mode we don't want to upload the results
const testMode = process.env["TEST_MODE"] === "true" || false;
if (testMode) {
return;
}
const client = api.getApiClient(apiDetails);
const reqURL = mode === "actions"
? "PUT /repos/:owner/:repo/code-scanning/analysis"
: "POST /repos/:owner/:repo/code-scanning/sarifs";
const response = await client.request(reqURL, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
data: payload,
});
logger.debug(`response status: ${response.status}`);
logger.info("Successfully uploaded results");
}
// Recursively walks a directory and returns all SARIF files it finds.
// Does not follow symlinks.
function findSarifFilesInDir(sarifPath) {
const sarifFiles = [];
const walkSarifFiles = (dir) => {
const entries = fs.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile() && entry.name.endsWith(".sarif")) {
sarifFiles.push(path.resolve(dir, entry.name));
}
else if (entry.isDirectory()) {
walkSarifFiles(path.resolve(dir, entry.name));
}
// Make up to 4 attempts to upload, and sleep for these
// number of seconds between each attempt.
// We don't want to backoff too much to avoid wasting action
// minutes, but just waiting a little bit could maybe help.
const backoffPeriods = [1, 5, 15];
const client = api.getApiClient(githubAuth, githubUrl);
for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
const reqURL = mode === "actions"
? "PUT /repos/:owner/:repo/code-scanning/analysis"
: "POST /repos/:owner/:repo/code-scanning/sarifs";
const response = await client.request(reqURL, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
data: payload,
});
logger.debug(`response status: ${response.status}`);
const statusCode = response.status;
if (statusCode === 202) {
logger.info("Successfully uploaded results");
return;
}
};
walkSarifFiles(sarifPath);
return sarifFiles;
const requestID = response.headers["x-github-request-id"];
// On any other status code that's not 5xx mark the upload as failed
if (!statusCode || statusCode < 500 || statusCode >= 600) {
throw new Error(`Upload failed (${requestID}): (${statusCode}) ${JSON.stringify(response.data)}`);
}
// On a 5xx status code we may retry the request
if (attempt < backoffPeriods.length) {
// Log the failure as a warning but don't mark the action as failed yet
logger.warning(`Upload attempt (${attempt + 1} of ${backoffPeriods.length + 1}) failed (${requestID}). Retrying in ${backoffPeriods[attempt]} seconds: (${statusCode}) ${JSON.stringify(response.data)}`);
// Sleep for the backoff period
await new Promise((r) => setTimeout(r, backoffPeriods[attempt] * 1000));
continue;
}
else {
// If the upload fails with 5xx then we assume it is a temporary problem
// and not an error that the user has caused or can fix.
// We avoid marking the job as failed to avoid breaking CI workflows.
throw new Error(`Upload failed (${requestID}): (${statusCode}) ${JSON.stringify(response.data)}`);
}
}
// This case shouldn't ever happen as the final iteration of the loop
// will always throw an error instead of exiting to here.
throw new Error("Upload failed");
}
exports.findSarifFilesInDir = findSarifFilesInDir;
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
async function uploadFromActions(sarifPath, gitHubVersion, apiDetails, logger) {
return await uploadFiles(getSarifFilePaths(sarifPath), repository_1.parseRepositoryNwo(actionsUtil.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), gitHubVersion, apiDetails, "actions", logger);
}
exports.uploadFromActions = uploadFromActions;
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
async function uploadFromRunner(sarifPath, repositoryNwo, commitOid, ref, checkoutPath, gitHubVersion, apiDetails, logger) {
return await uploadFiles(getSarifFilePaths(sarifPath), repositoryNwo, commitOid, ref, undefined, undefined, undefined, checkoutPath, undefined, gitHubVersion, apiDetails, "runner", logger);
}
exports.uploadFromRunner = uploadFromRunner;
function getSarifFilePaths(sarifPath) {
async function upload(sarifPath, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger) {
const sarifFiles = [];
if (!fs.existsSync(sarifPath)) {
throw new Error(`Path does not exist: ${sarifPath}`);
}
let sarifFiles;
if (fs.lstatSync(sarifPath).isDirectory()) {
sarifFiles = findSarifFilesInDir(sarifPath);
fs.readdirSync(sarifPath)
.filter((f) => f.endsWith(".sarif"))
.map((f) => path.resolve(sarifPath, f))
.forEach((f) => sarifFiles.push(f));
if (sarifFiles.length === 0) {
throw new Error(`No SARIF files found to upload in "${sarifPath}".`);
}
}
else {
sarifFiles = [sarifPath];
sarifFiles.push(sarifPath);
}
return sarifFiles;
return await uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger);
}
exports.upload = upload;
// Counts the number of results in the given SARIF file
function countResultsInSarif(sarif) {
let numResults = 0;
let parsedSarif;
try {
parsedSarif = JSON.parse(sarif);
}
catch (e) {
throw new Error(`Invalid SARIF. JSON syntax error: ${e.message}`);
}
if (!Array.isArray(parsedSarif.runs)) {
throw new Error("Invalid SARIF. Missing 'runs' array.");
}
for (const run of parsedSarif.runs) {
if (!Array.isArray(run.results)) {
throw new Error("Invalid SARIF. Missing 'results' array in run.");
}
for (const run of JSON.parse(sarif).runs) {
numResults += run.results.length;
}
return numResults;
@@ -156,50 +147,9 @@ function validateSarifFileSchema(sarifFilePath, logger) {
}
}
exports.validateSarifFileSchema = validateSarifFileSchema;
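countResultsInSarif above only needs the top-level runs array and each run's results array to be present. A minimal, made-up document it would accept (counting two results in total):

```js
// Hypothetical minimal SARIF string; countResultsInSarif(exampleSarif) === 2.
const exampleSarif = JSON.stringify({
    version: "2.1.0",
    runs: [
        { results: [{ ruleId: "js/example-rule-1" }] },
        { results: [{ ruleId: "js/example-rule-2" }] },
    ],
});
```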
// buildPayload constructs a map ready to be uploaded to the API from the given
// parameters, respecting the current mode and target GitHub instance version.
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion, mode) {
if (mode === "actions") {
const payloadObj = {
commit_oid: commitOid,
ref,
analysis_key: analysisKey,
analysis_name: analysisName,
sarif: zippedSarif,
workflow_run_id: workflowRunID,
checkout_uri: checkoutURI,
environment,
started_at: process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT],
tool_names: toolNames,
base_ref: undefined,
base_sha: undefined,
};
// This behaviour can be made the default when support for GHES 3.0 is discontinued.
if (gitHubVersion.type !== util.GitHubVariant.GHES ||
semver.satisfies(gitHubVersion.version, `>=3.1`)) {
if (process.env.GITHUB_EVENT_NAME === "pull_request" &&
process.env.GITHUB_EVENT_PATH) {
const githubEvent = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8"));
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
}
}
return payloadObj;
}
else {
return {
commit_sha: commitOid,
ref,
sarif: zippedSarif,
checkout_uri: checkoutURI,
tool_name: toolNames[0],
};
}
}
exports.buildPayload = buildPayload;
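As a rough illustration of the two shapes buildPayload can produce, a "runner"-mode call returns only the smaller object; every argument below is a placeholder rather than real data:

```js
// Illustrative placeholders only.
const runnerPayload = buildPayload(
    "0000000000000000000000000000000000000000", // commitOid
    "refs/heads/main",                          // ref
    undefined,                                  // analysisKey (actions mode only)
    undefined,                                  // analysisName (actions mode only)
    "<base64-gzipped-sarif>",                   // zippedSarif
    undefined,                                  // workflowRunID (actions mode only)
    "file:///checkout",                         // checkoutURI
    undefined,                                  // environment (actions mode only)
    ["CodeQL"],                                 // toolNames
    { type: util.GitHubVariant.DOTCOM },        // gitHubVersion
    "runner"
);
// => { commit_sha, ref, sarif, checkout_uri, tool_name: "CodeQL" }
```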
// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, gitHubVersion, apiDetails, mode, logger) {
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubUrl, mode, logger) {
logger.info(`Uploading sarif files: ${JSON.stringify(sarifFiles)}`);
if (mode === "actions") {
// This check only works on actions as env vars don't persist between calls to the runner
@@ -215,19 +165,42 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
}
let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = fingerprints.addFingerprints(sarifPayload, checkoutPath, logger);
const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = file_url_1.default(checkoutPath);
const toolNames = util.getToolNames(sarifPayload);
const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion, mode);
let payload;
if (mode === "actions") {
payload = JSON.stringify({
commit_oid: commitOid,
ref,
analysis_key: analysisKey,
analysis_name: analysisName,
sarif: zipped_sarif,
workflow_run_id: workflowRunID,
checkout_uri: checkoutURI,
environment,
started_at: process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT],
tool_names: toolNames,
});
}
else {
payload = JSON.stringify({
commit_sha: commitOid,
ref,
sarif: zipped_sarif,
checkout_uri: checkoutURI,
tool_name: toolNames[0],
});
}
// Log some useful debug info about the upload
const rawUploadSizeBytes = sarifPayload.length;
logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
const zippedUploadSizeBytes = zippedSarif.length;
const zippedUploadSizeBytes = zipped_sarif.length;
logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
const numResultInSarif = countResultsInSarif(sarifPayload);
logger.debug(`Number of results in upload: ${numResultInSarif}`);
// Make the upload
await uploadPayload(payload, repositoryNwo, apiDetails, mode, logger);
await uploadPayload(payload, repositoryNwo, githubAuth, githubUrl, mode, logger);
return {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,

File diff suppressed because one or more lines are too long

lib/upload-lib.test.js (generated, 65 lines changed)
View File

@@ -1,4 +1,7 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
@@ -6,17 +9,11 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const uploadLib = __importStar(require("./upload-lib"));
const util_1 = require("./util");
testing_utils_1.setupTests(ava_1.default);
ava_1.default("validateSarifFileSchema - valid", (t) => {
const inputFile = `${__dirname}/../src/testdata/valid-sarif.sarif`;
@@ -26,60 +23,4 @@ ava_1.default("validateSarifFileSchema - invalid", (t) => {
const inputFile = `${__dirname}/../src/testdata/invalid-sarif.sarif`;
t.throws(() => uploadLib.validateSarifFileSchema(inputFile, logging_1.getRunnerLogger(true)));
});
ava_1.default("validate correct payload used per version", async (t) => {
const newVersions = [
{ type: util_1.GitHubVariant.DOTCOM },
{ type: util_1.GitHubVariant.GHES, version: "3.1.0" },
];
const oldVersions = [
{ type: util_1.GitHubVariant.GHES, version: "2.22.1" },
{ type: util_1.GitHubVariant.GHES, version: "3.0.0" },
];
const allVersions = newVersions.concat(oldVersions);
process.env["GITHUB_EVENT_NAME"] = "push";
for (const version of allVersions) {
const payload = uploadLib.buildPayload("commit", "refs/heads/master", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "actions");
// Not triggered by a pull request
t.falsy(payload.base_ref);
t.falsy(payload.base_sha);
}
process.env["GITHUB_EVENT_NAME"] = "pull_request";
process.env["GITHUB_EVENT_PATH"] = `${__dirname}/../src/testdata/pull_request.json`;
for (const version of newVersions) {
const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "actions");
t.deepEqual(payload.base_ref, "refs/heads/master");
t.deepEqual(payload.base_sha, "f95f852bd8fca8fcc58a9a2d6c842781e32a215e");
}
for (const version of oldVersions) {
const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "actions");
// These older versions won't expect these values
t.falsy(payload.base_ref);
t.falsy(payload.base_sha);
}
});
ava_1.default("finding SARIF files", async (t) => {
await util_1.withTmpDir(async (tmpDir) => {
// include a couple of sarif files
fs.writeFileSync(path.join(tmpDir, "a.sarif"), "");
fs.writeFileSync(path.join(tmpDir, "b.sarif"), "");
// other random files shouldn't be returned
fs.writeFileSync(path.join(tmpDir, "c.foo"), "");
// we should recursively look in subdirectories
fs.mkdirSync(path.join(tmpDir, "dir1"));
fs.writeFileSync(path.join(tmpDir, "dir1", "d.sarif"), "");
fs.mkdirSync(path.join(tmpDir, "dir1", "dir2"));
fs.writeFileSync(path.join(tmpDir, "dir1", "dir2", "e.sarif"), "");
// we should ignore symlinks
fs.mkdirSync(path.join(tmpDir, "dir3"));
fs.symlinkSync(tmpDir, path.join(tmpDir, "dir3", "symlink1"), "dir");
fs.symlinkSync(path.join(tmpDir, "a.sarif"), path.join(tmpDir, "dir3", "symlink2.sarif"), "file");
const sarifFiles = uploadLib.findSarifFilesInDir(tmpDir);
t.deepEqual(sarifFiles, [
path.join(tmpDir, "a.sarif"),
path.join(tmpDir, "b.sarif"),
path.join(tmpDir, "dir1", "d.sarif"),
path.join(tmpDir, "dir1", "dir2", "e.sarif"),
]);
});
});
//# sourceMappingURL=upload-lib.test.js.map

View File

@@ -1 +1 @@
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,8CAAuB;AAEvB,uCAA4C;AAC5C,mDAA6C;AAC7C,wDAA0C;AAC1C,iCAAkE;AAElE,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE,EAAE;IAC5C,MAAM,SAAS,GAAG,GAAG,SAAS,oCAAoC,CAAC;IACnE,CAAC,CAAC,SAAS,CAAC,GAAG,EAAE,CACf,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,yBAAe,CAAC,IAAI,CAAC,CAAC,CACpE,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE,EAAE;IAC9C,MAAM,SAAS,GAAG,GAAG,SAAS,sCAAsC,CAAC;IACrE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CACZ,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,yBAAe,CAAC,IAAI,CAAC,CAAC,CACpE,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,2CAA2C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC5D,MAAM,WAAW,GAAoB;QACnC,EAAE,IAAI,EAAE,oBAAa,CAAC,MAAM,EAAE;QAC9B,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE;KAC/C,CAAC;IACF,MAAM,WAAW,GAAoB;QACnC,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,EAAE;QAC/C,EAAE,IAAI,EAAE,oBAAa,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE;KAC/C,CAAC;IACF,MAAM,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;IAEpD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,MAAM,CAAC;IAC1C,KAAK,MAAM,OAAO,IAAI,WAAW,EAAE;QACjC,MAAM,OAAO,GAAQ,SAAS,CAAC,YAAY,CACzC,QAAQ,EACR,mBAAmB,EACnB,KAAK,EACL,SAAS,EACT,EAAE,EACF,SAAS,EACT,UAAU,EACV,SAAS,EACT,CAAC,QAAQ,EAAE,QAAQ,CAAC,EACpB,OAAO,EACP,SAAS,CACV,CAAC;QACF,kCAAkC;QAClC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QAC1B,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;KAC3B;IAED,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,cAAc,CAAC;IAClD,OAAO,CAAC,GAAG,CACT,mBAAmB,CACpB,GAAG,GAAG,SAAS,oCAAoC,CAAC;IACrD,KAAK,MAAM,OAAO,IAAI,WAAW,EAAE;QACjC,MAAM,OAAO,GAAQ,SAAS,CAAC,YAAY,CACzC,QAAQ,EACR,qBAAqB,EACrB,KAAK,EACL,SAAS,EACT,EAAE,EACF,SAAS,EACT,UAAU,EACV,SAAS,EACT,CAAC,QAAQ,EAAE,QAAQ,CAAC,EACpB,OAAO,EACP,SAAS,CACV,CAAC;QACF,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC,QAAQ,EAAE,mBAAmB,CAAC,CAAC;QACnD,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC,QAAQ,EAAE,0CAA0C,CAAC,CAAC;KAC3E;IAED,KAAK,MAAM,OAAO,IAAI,WAAW,EAAE;QACjC,MAAM,OAAO,GAAQ,SAAS,CAAC,YAAY,CACzC,QAAQ,EACR,qBAAqB,EACrB,KAAK,EACL,SAAS,EACT,EAAE,EACF,SAAS,EACT,UAAU,EACV,SAAS,EACT,CAAC,QAAQ,EAAE,QAAQ,CAAC,EACpB,OAAO,EACP,SAAS,CACV,CAAC;QACF,iDAAiD;QACjD,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QAC1B,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;KAC3B;AACH,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,qBAAqB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACtC,MAAM,iBAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,kCAAkC;QAClC,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE,EAAE,CAAC,CAAC;QACnD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE,EAAE,CAAC,CAAC;QAEnD,2CAA2C;QAC3C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,EAAE,CAAC,CAAC;QAEjD,+CAA+C;QAC/C,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,CAAC;QACxC,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,SAAS,CAAC,EAAE,EAAE,CAAC,CAAC;QAC3D,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,CAAC;QAChD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,SAAS,CAAC,EAAE,EAAE,CAAC,CAAC;QAEnE,4BAA4B;QAC5B,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,CAAC;QACxC,EAAE,CAAC,WAAW,CAAC,MAAM,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,CAAC,EAAE,KAAK,CAAC,CAAC;QACrE,EAAE,CAAC,WAAW,CACZ,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,EAC5B,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,gBAAgB,CAAC,EAC3C,MAAM,CACP,CAAC;QAEF,MAAM,UAAU,GAAG,SAAS,CAAC,mBAAmB,CAAC,MAAM,CAAC,CAAC;QAEzD,CAAC,CAAC,SAAS,CAAC,UAAU,EAAE;YACtB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC;YAC5B,IAA
I,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC;YAC5B,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,SAAS,CAAC;YACpC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,SAAS,CAAC;SAC7C,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,uCAA4C;AAC5C,mDAA6C;AAC7C,wDAA0C;AAE1C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE,EAAE;IAC5C,MAAM,SAAS,GAAG,GAAG,SAAS,oCAAoC,CAAC;IACnE,CAAC,CAAC,SAAS,CAAC,GAAG,EAAE,CACf,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,yBAAe,CAAC,IAAI,CAAC,CAAC,CACpE,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE,EAAE;IAC9C,MAAM,SAAS,GAAG,GAAG,SAAS,sCAAsC,CAAC;IACrE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CACZ,SAAS,CAAC,uBAAuB,CAAC,SAAS,EAAE,yBAAe,CAAC,IAAI,CAAC,CAAC,CACpE,CAAC;AACJ,CAAC,CAAC,CAAC"}

View File

@@ -8,47 +8,36 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util_1 = require("./util");
const util = __importStar(require("./util"));
async function sendSuccessStatusReport(startedAt, uploadStats) {
const statusReportBase = await actionsUtil.createStatusReportBase("upload-sarif", "success", startedAt);
const statusReportBase = await util.createStatusReportBase("upload-sarif", "success", startedAt);
const statusReport = {
...statusReportBase,
...uploadStats,
};
await actionsUtil.sendStatusReport(statusReport);
await util.sendStatusReport(statusReport);
}
async function run() {
const startedAt = new Date();
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("upload-sarif", "starting", startedAt)))) {
if (!(await util.sendStatusReport(await util.createStatusReportBase("upload-sarif", "starting", startedAt), true))) {
return;
}
try {
const apiDetails = {
auth: actionsUtil.getRequiredInput("token"),
url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
};
const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
const uploadStats = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, logging_1.getActionsLogger());
const uploadStats = await upload_lib.upload(util.getRequiredInput("sarif_file"), repository_1.parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await util.getCommitOid(), util.getRef(), await util.getAnalysisKey(), util.getRequiredEnvParam("GITHUB_WORKFLOW"), util.getWorkflowRunID(), util.getRequiredInput("checkout_path"), util.getRequiredInput("matrix"), util.getRequiredInput("token"), util.getRequiredEnvParam("GITHUB_SERVER_URL"), "actions", logging_1.getActionsLogger());
await sendSuccessStatusReport(startedAt, uploadStats);
}
catch (error) {
core.setFailed(error.message);
console.log(error);
await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("upload-sarif", "failure", startedAt, error.message, error.stack));
await util.sendStatusReport(await util.createStatusReportBase("upload-sarif", "failure", startedAt, error.message, error.stack));
return;
}
}
async function runWrapper() {
try {
await run();
}
catch (error) {
core.setFailed(`codeql/upload-sarif action failed: ${error}`);
console.log(error);
}
}
void runWrapper();
run().catch((e) => {
core.setFailed(`codeql/upload-sarif action failed: ${e}`);
console.log(e);
});
//# sourceMappingURL=upload-sarif-action.js.map

View File

@@ -1 +1 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,yDAA2C;AAC3C,iCAA0C;AAM1C,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,WAAW,CAAC,mBAAmB,CAAC,mBAAmB,CAAC;SAC1D,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,uBAAgB,CAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACpD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,0BAAgB,EAAE,CACnB,CAAC;QACF,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KACvD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAM/B,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAC3B,MAAM,IAAI,CAAC,sBAAsB,CAAC,cAAc,EAAE,UAAU,EAAE,SAAS,CAAC,EACxE,IAAI,CACL,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CACzC,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,EACnC,+BAAkB,CAAC,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,CAAC,EACjE,MAAM,IAAI,CAAC,YAAY,EAAE,EACzB,IAAI,CAAC,MAAM,EAAE,EACb,MAAM,IAAI,CAAC,cAAc,EAAE,EAC3B,IAAI,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,EAC3C,IAAI,CAAC,gBAAgB,EAAE,EACvB,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC,EACtC,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EAC/B,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC,EAC9B,IAAI,CAAC,mBAAmB,CAAC,mBAAmB,CAAC,EAC7C,SAAS,EACT,0BAAgB,EAAE,CACnB,CAAC;QACF,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KACvD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,IAAI,CAAC,gBAAgB,CACzB,MAAM,IAAI,CAAC,sBAAsB,CAC/B,cAAc,EACd,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;IAChB,IAAI,CAAC,SAAS,CAAC,sCAAsC,CAAC,EAAE,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}

lib/util.js (generated, 429 lines changed)
View File

@@ -7,17 +7,29 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const toolrunnner = __importStar(require("@actions/exec/lib/toolrunner"));
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const semver = __importStar(require("semver"));
const api_client_1 = require("./api-client");
const apiCompatibility = __importStar(require("./api-compatibility.json"));
const api = __importStar(require("./api-client"));
const sharedEnv = __importStar(require("./shared-environment"));
/**
* The URL for github.com.
*/
exports.GITHUB_DOTCOM_URL = "https://github.com";
/**
* Get an environment parameter, but throw an error if it is not set.
*/
function getRequiredEnvParam(paramName) {
const value = process.env[paramName];
if (value === undefined || value.length === 0) {
throw new Error(`${paramName} environment variable must be set`);
}
core.debug(`${paramName}=${value}`);
return value;
}
exports.getRequiredEnvParam = getRequiredEnvParam;
/**
* Get the extra options for the codeql commands.
*/
@@ -41,6 +53,222 @@ function isLocalRun() {
process.env.CODEQL_LOCAL_RUN !== "0");
}
exports.isLocalRun = isLocalRun;
/**
* Ensures all required environment variables are set in the context of a local run.
*/
function prepareLocalRunEnvironment() {
if (!isLocalRun()) {
return;
}
core.debug("Action is running locally.");
if (!process.env.GITHUB_JOB) {
core.exportVariable("GITHUB_JOB", "UNKNOWN-JOB");
}
}
exports.prepareLocalRunEnvironment = prepareLocalRunEnvironment;
/**
* Gets the SHA of the commit that is currently checked out.
*/
async function getCommitOid() {
// Try to use git to get the current commit SHA. If that fails then
// log but otherwise silently fall back to using the SHA from the environment.
// The only time these two values will differ is during analysis of a PR when
// the workflow has changed the current commit to the head commit instead of
// the merge commit, which must mean that git is available.
// Even if this does go wrong, it's not a huge problem for the alerts to
// be reported on the merge commit.
try {
let commitOid = "";
await new toolrunnner.ToolRunner("git", ["rev-parse", "HEAD"], {
silent: true,
listeners: {
stdout: (data) => {
commitOid += data.toString();
},
stderr: (data) => {
process.stderr.write(data);
},
},
}).exec();
return commitOid.trim();
}
catch (e) {
core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
return getRequiredEnvParam("GITHUB_SHA");
}
}
exports.getCommitOid = getCommitOid;
/**
* Get the path of the currently executing workflow.
*/
async function getWorkflowPath() {
const repo_nwo = getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
const owner = repo_nwo[0];
const repo = repo_nwo[1];
const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID"));
const apiClient = api.getActionsApiClient();
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id", {
owner,
repo,
run_id,
});
const workflowUrl = runsResponse.data.workflow_url;
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
return workflowResponse.data.path;
}
/**
* Get the workflow run ID.
*/
function getWorkflowRunID() {
const workflowRunID = parseInt(getRequiredEnvParam("GITHUB_RUN_ID"), 10);
if (Number.isNaN(workflowRunID)) {
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
}
return workflowRunID;
}
exports.getWorkflowRunID = getWorkflowRunID;
/**
 * Get the analysis key parameter for the current job.
*
* This will combine the workflow path and current job name.
* Computing this the first time requires making requests to
* the github API, but after that the result will be cached.
*/
async function getAnalysisKey() {
const analysisKeyEnvVar = "CODEQL_ACTION_ANALYSIS_KEY";
let analysisKey = process.env[analysisKeyEnvVar];
if (analysisKey !== undefined) {
return analysisKey;
}
const workflowPath = await getWorkflowPath();
const jobName = getRequiredEnvParam("GITHUB_JOB");
analysisKey = `${workflowPath}:${jobName}`;
core.exportVariable(analysisKeyEnvVar, analysisKey);
return analysisKey;
}
exports.getAnalysisKey = getAnalysisKey;
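To make the key format concrete, it is just the workflow file path and the job name joined with a colon; the values here are hypothetical:

```js
// Hypothetical values; the real ones come from the workflow-runs API and GITHUB_JOB.
const workflowPath = ".github/workflows/codeql-analysis.yml";
const jobName = "analyze";
const analysisKey = `${workflowPath}:${jobName}`;
// => ".github/workflows/codeql-analysis.yml:analyze"
// Later calls read it back from CODEQL_ACTION_ANALYSIS_KEY instead of hitting the API again.
```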
/**
* Get the ref currently being analyzed.
*/
function getRef() {
// Will be in the form "refs/heads/master" on a push event
// or in the form "refs/pull/N/merge" on a pull_request event
const ref = getRequiredEnvParam("GITHUB_REF");
// For pull request refs we want to convert from the 'merge' ref
// to the 'head' ref, as that is what we want to analyse.
// There should have been some code earlier in the workflow to do
// the checkout, but we have no way of verifying that here.
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
if (pull_ref_regex.test(ref)) {
return ref.replace(pull_ref_regex, "refs/pull/$1/head");
}
else {
return ref;
}
}
exports.getRef = getRef;
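A quick illustration of the ref rewriting above, using made-up refs:

```js
// Push refs pass through unchanged: "refs/heads/main" stays "refs/heads/main".
// Pull request merge refs are rewritten to the head ref:
const rewritten = "refs/pull/123/merge".replace(/refs\/pull\/(\d+)\/merge/, "refs/pull/$1/head");
// rewritten === "refs/pull/123/head"
```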
/**
* Compose a StatusReport.
*
* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
* @param status The status. Must be 'success', 'failure', or 'starting'
* @param startedAt The time this action started executing.
* @param cause Cause of failure (only supply if status is 'failure')
* @param exception Exception (only supply if status is 'failure')
*/
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
const commitOid = process.env["GITHUB_SHA"] || "";
const ref = getRef();
const workflowRunIDStr = process.env["GITHUB_RUN_ID"];
let workflowRunID = -1;
if (workflowRunIDStr) {
workflowRunID = parseInt(workflowRunIDStr, 10);
}
const workflowName = process.env["GITHUB_WORKFLOW"] || "";
const jobName = process.env["GITHUB_JOB"] || "";
const analysis_key = await getAnalysisKey();
let workflowStartedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
if (workflowStartedAt === undefined) {
workflowStartedAt = actionStartedAt.toISOString();
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
}
const statusReport = {
workflow_run_id: workflowRunID,
workflow_name: workflowName,
job_name: jobName,
analysis_key,
commit_oid: commitOid,
ref,
action_name: actionName,
action_oid: "unknown",
started_at: workflowStartedAt,
action_started_at: actionStartedAt.toISOString(),
status,
};
// Add optional parameters
if (cause) {
statusReport.cause = cause;
}
if (exception) {
statusReport.exception = exception;
}
if (status === "success" || status === "failure" || status === "aborted") {
statusReport.completed_at = new Date().toISOString();
}
const matrix = getOptionalInput("matrix");
if (matrix) {
statusReport.matrix_vars = matrix;
}
return statusReport;
}
exports.createStatusReportBase = createStatusReportBase;
/**
* Send a status report to the code_scanning/analysis/status endpoint.
*
* Optionally checks the response from the API endpoint and sets the action
* as failed if the status report failed. This is only expected to be used
* when sending a 'starting' report.
*
 * Returns whether sending the status report was successful or not.
*/
async function sendStatusReport(statusReport, ignoreFailures) {
if (getRequiredEnvParam("GITHUB_SERVER_URL") !== exports.GITHUB_DOTCOM_URL) {
core.debug("Not sending status report to GitHub Enterprise");
return true;
}
if (isLocalRun()) {
core.debug("Not sending status report because this is a local run");
return true;
}
const statusReportJSON = JSON.stringify(statusReport);
core.debug(`Sending status report: ${statusReportJSON}`);
const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const client = api.getActionsApiClient();
const statusResponse = await client.request("PUT /repos/:owner/:repo/code-scanning/analysis/status", {
owner,
repo,
data: statusReportJSON,
});
if (!ignoreFailures) {
// If the status report request fails with a 403 or a 404, then this is a deliberate
// message from the endpoint that the SARIF upload can be expected to fail too,
// so the action should fail to avoid wasting actions minutes.
//
// Other failure responses (or lack thereof) could be transitory and should not
// cause the action to fail.
if (statusResponse.status === 403) {
core.setFailed("The repo on which this action is running is not opted-in to CodeQL code scanning.");
return false;
}
if (statusResponse.status === 404) {
core.setFailed("Not authorized to used the CodeQL code scanning feature on this repo.");
return false;
}
}
return true;
}
exports.sendStatusReport = sendStatusReport;
/**
* Get the array of all the tool names contained in the given sarif contents.
*
@@ -72,21 +300,9 @@ async function withTmpDir(body) {
return result;
}
exports.withTmpDir = withTmpDir;
/**
* Gets an OS-specific amount of memory (in MB) to reserve for OS processes
* when the user doesn't explicitly specify a memory setting.
* This is a heuristic to avoid OOM errors (exit code 137 / SIGKILL)
* from committing too much of the available memory to CodeQL.
* @returns number
*/
function getSystemReservedMemoryMegaBytes() {
// Windows needs more memory for OS processes.
return 1024 * (process.platform === "win32" ? 1.5 : 1);
}
/**
* Get the codeql `--ram` flag as configured by the `ram` input. If no value was
* specified, the total available memory will be used minus a threshold
* reserved for the OS.
* specified, the total available memory will be used minus 256 MB.
*
* @returns string
*/
@@ -101,8 +317,8 @@ function getMemoryFlag(userInput) {
else {
const totalMemoryBytes = os.totalmem();
const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
const reservedMemoryMegaBytes = getSystemReservedMemoryMegaBytes();
memoryToUseMegaBytes = totalMemoryMegaBytes - reservedMemoryMegaBytes;
const systemReservedMemoryMegaBytes = 256;
memoryToUseMegaBytes = totalMemoryMegaBytes - systemReservedMemoryMegaBytes;
}
return `--ram=${Math.floor(memoryToUseMegaBytes)}`;
}
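A worked example of the two behaviours shown in this hunk, assuming a hypothetical runner with 7168 MB of total memory and no explicit ram input:

```js
// New behaviour: OS-specific reservation (1024 MB, or 1536 MB on Windows).
//   Linux/macOS: 7168 - 1024 => "--ram=6144"
//   Windows:     7168 - 1536 => "--ram=5632"
// Old behaviour: flat 256 MB reservation => "--ram=6912"
const totalMemoryMegaBytes = 7168; // hypothetical
const reserved = 1024 * (process.platform === "win32" ? 1.5 : 1);
console.log(`--ram=${Math.floor(totalMemoryMegaBytes - reserved)}`);
```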
@@ -168,172 +384,23 @@ function getCodeQLDatabasePath(tempDir, language) {
}
exports.getCodeQLDatabasePath = getCodeQLDatabasePath;
/**
* Parses user input of a github.com or GHES URL to a canonical form.
* Removes any API prefix or suffix if one is present.
* Wrapper for core.getInput. Returns undefined if the requested
* input is not specified.
*/
function parseGitHubUrl(inputUrl) {
const originalUrl = inputUrl;
if (inputUrl.indexOf("://") === -1) {
inputUrl = `https://${inputUrl}`;
function getOptionalInput(name) {
const value = core.getInput(name);
if (value === "") {
return undefined;
}
if (!inputUrl.startsWith("http://") && !inputUrl.startsWith("https://")) {
throw new Error(`"${originalUrl}" is not a http or https URL`);
}
let url;
try {
url = new URL(inputUrl);
}
catch (e) {
throw new Error(`"${originalUrl}" is not a valid URL`);
}
// If we detect this is trying to be to github.com
// then return with a fixed canonical URL.
if (url.hostname === "github.com" || url.hostname === "api.github.com") {
return exports.GITHUB_DOTCOM_URL;
}
// Remove the API prefix if it's present
if (url.pathname.indexOf("/api/v3") !== -1) {
url.pathname = url.pathname.substring(0, url.pathname.indexOf("/api/v3"));
}
// Also consider subdomain isolation on GHES
if (url.hostname.startsWith("api.")) {
url.hostname = url.hostname.substring(4);
}
// Normalise path to having a trailing slash for consistency
if (!url.pathname.endsWith("/")) {
url.pathname = `${url.pathname}/`;
}
return url.toString();
return value;
}
exports.parseGitHubUrl = parseGitHubUrl;
const GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
const CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR = "CODEQL_ACTION_WARNED_ABOUT_VERSION";
let hasBeenWarnedAboutVersion = false;
var GitHubVariant;
(function (GitHubVariant) {
GitHubVariant[GitHubVariant["DOTCOM"] = 0] = "DOTCOM";
GitHubVariant[GitHubVariant["GHES"] = 1] = "GHES";
GitHubVariant[GitHubVariant["GHAE"] = 2] = "GHAE";
})(GitHubVariant = exports.GitHubVariant || (exports.GitHubVariant = {}));
async function getGitHubVersion(apiDetails) {
// We can avoid making an API request in the standard dotcom case
if (parseGitHubUrl(apiDetails.url) === exports.GITHUB_DOTCOM_URL) {
return { type: GitHubVariant.DOTCOM };
}
// Doesn't strictly have to be the meta endpoint as we're only
// using the response headers which are available on every request.
const apiClient = api_client_1.getApiClient(apiDetails);
const response = await apiClient.meta.get();
// This happens on dotcom, although we expect to have already returned in that
// case. This can also serve as a fallback in cases we haven't foreseen.
if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === undefined) {
return { type: GitHubVariant.DOTCOM };
}
if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === "GitHub AE") {
return { type: GitHubVariant.GHAE };
}
const version = response.headers[GITHUB_ENTERPRISE_VERSION_HEADER];
return { type: GitHubVariant.GHES, version };
}
exports.getGitHubVersion = getGitHubVersion;
function checkGitHubVersionInRange(version, mode, logger) {
if (hasBeenWarnedAboutVersion || version.type !== GitHubVariant.GHES) {
return;
}
const disallowedAPIVersionReason = apiVersionInRange(version.version, apiCompatibility.minimumVersion, apiCompatibility.maximumVersion);
const toolName = mode === "actions" ? "Action" : "Runner";
if (disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_OLD) {
logger.warning(`The CodeQL ${toolName} version you are using is too old to be compatible with GitHub Enterprise ${version.version}. If you experience issues, please upgrade to a more recent version of the CodeQL ${toolName}.`);
}
if (disallowedAPIVersionReason === DisallowedAPIVersionReason.ACTION_TOO_NEW) {
logger.warning(`GitHub Enterprise ${version.version} is too old to be compatible with this version of the CodeQL ${toolName}. If you experience issues, please upgrade to a more recent version of GitHub Enterprise or use an older version of the CodeQL ${toolName}.`);
}
hasBeenWarnedAboutVersion = true;
if (mode === "actions") {
core.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true);
}
}
exports.checkGitHubVersionInRange = checkGitHubVersionInRange;
var DisallowedAPIVersionReason;
(function (DisallowedAPIVersionReason) {
DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_OLD"] = 0] = "ACTION_TOO_OLD";
DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_NEW"] = 1] = "ACTION_TOO_NEW";
})(DisallowedAPIVersionReason = exports.DisallowedAPIVersionReason || (exports.DisallowedAPIVersionReason = {}));
function apiVersionInRange(version, minimumVersion, maximumVersion) {
if (!semver.satisfies(version, `>=${minimumVersion}`)) {
return DisallowedAPIVersionReason.ACTION_TOO_NEW;
}
if (!semver.satisfies(version, `<=${maximumVersion}`)) {
return DisallowedAPIVersionReason.ACTION_TOO_OLD;
}
return undefined;
}
exports.apiVersionInRange = apiVersionInRange;
exports.getOptionalInput = getOptionalInput;
/**
* Retrieves the github auth token for use with the runner. There are
* three possible locations for the token:
*
* 1. from the cli (considered insecure)
* 2. from stdin
* 3. from the GITHUB_TOKEN environment variable
*
* If both 1 & 2 are specified, then an error is thrown.
* If 1 & 3 or 2 & 3 are specified, then the environment variable is ignored.
*
* @param githubAuth a github app token or PAT
* @param fromStdIn read the github app token or PAT from stdin up to, but excluding the first whitespace
* @param readable the readable stream to use for getting the token (defaults to stdin)
*
* @return a promise resolving to the auth token.
* Wrapper for core.getInput. Returns the requested input or
* throws if not defined.
*/
async function getGitHubAuth(logger, githubAuth, fromStdIn, readable = process.stdin) {
if (githubAuth && fromStdIn) {
throw new Error("Cannot specify both `--github-auth` and `--github-auth-stdin`. Please use `--github-auth-stdin`, which is more secure.");
}
if (githubAuth) {
logger.warning("Using `--github-auth` via the CLI is insecure. Use `--github-auth-stdin` instead.");
return githubAuth;
}
if (fromStdIn) {
return new Promise((resolve, reject) => {
let token = "";
readable.on("data", (data) => {
token += data.toString("utf8");
});
readable.on("end", () => {
token = token.split(/\s+/)[0].trim();
if (token) {
resolve(token);
}
else {
reject(new Error("Standard input is empty"));
}
});
readable.on("error", (err) => {
reject(err);
});
});
}
if (process.env.GITHUB_TOKEN) {
return process.env.GITHUB_TOKEN;
}
throw new Error("No GitHub authentication token was specified. Please provide a token via the GITHUB_TOKEN environment variable, or by adding the `--github-auth-stdin` flag and passing the token via standard input.");
function getRequiredInput(name) {
return core.getInput(name, { required: true });
}
exports.getGitHubAuth = getGitHubAuth;
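A brief sketch of how the precedence described in the comment above plays out; the token values are placeholders:

```js
// 1. An explicit CLI token wins, but logs an insecure-usage warning:
//      await getGitHubAuth(logger, "cli-token", false);      // => "cli-token"
// 2. With --github-auth-stdin, everything up to the first whitespace on stdin is used:
//      await getGitHubAuth(logger, undefined, true);         // => first token on stdin
// 3. Otherwise GITHUB_TOKEN is used, and if that is unset an error is thrown:
//      process.env.GITHUB_TOKEN = "env-token";
//      await getGitHubAuth(logger, undefined, false);        // => "env-token"
// Supplying both a CLI token and --github-auth-stdin is rejected with an error.
```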
// Sets environment variables that make using some libraries designed for
// use only on actions safe to use outside of actions.
//
// Obviously this is not a tremendously great thing we're doing and it
// would be better to write our own implementation of libraries to use
// outside of actions. For now this works well enough.
//
// Currently this list of libraries that is deemed to now be safe includes:
// - @actions/tool-cache
//
// Also see "queries/unguarded-action-lib.ql".
function setupActionsVars(tempDir, toolsDir) {
process.env["RUNNER_TEMP"] = tempDir;
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
}
exports.setupActionsVars = setupActionsVars;
exports.getRequiredInput = getRequiredInput;
//# sourceMappingURL=util.js.map

File diff suppressed because one or more lines are too long

lib/util.test.js (generated, 165 lines changed)
View File

@@ -1,4 +1,7 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
@@ -6,17 +9,10 @@ var __importStar = (this && this.__importStar) || function (mod) {
result["default"] = mod;
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const stream = __importStar(require("stream"));
const github = __importStar(require("@actions/github"));
const ava_1 = __importDefault(require("ava"));
const sinon_1 = __importDefault(require("sinon"));
const api = __importStar(require("./api-client"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
@@ -28,13 +24,11 @@ ava_1.default("getToolNames", (t) => {
});
ava_1.default("getMemoryFlag() should return the correct --ram flag", (t) => {
const totalMem = Math.floor(os.totalmem() / (1024 * 1024));
const expectedThreshold = process.platform === "win32" ? 1536 : 1024;
const tests = [
[undefined, `--ram=${totalMem - expectedThreshold}`],
["", `--ram=${totalMem - expectedThreshold}`],
["512", "--ram=512"],
];
for (const [input, expectedFlag] of tests) {
const tests = {
"": `--ram=${totalMem - 256}`,
"512": "--ram=512",
};
for (const [input, expectedFlag] of Object.entries(tests)) {
const flag = util.getMemoryFlag(input);
t.deepEqual(flag, expectedFlag);
}
@@ -54,15 +48,13 @@ ava_1.default("getAddSnippetsFlag() should return the correct flag", (t) => {
});
ava_1.default("getThreadsFlag() should return the correct --threads flag", (t) => {
const numCpus = os.cpus().length;
const tests = [
["0", "--threads=0"],
["1", "--threads=1"],
[undefined, `--threads=${numCpus}`],
["", `--threads=${numCpus}`],
[`${numCpus + 1}`, `--threads=${numCpus}`],
[`${-numCpus - 1}`, `--threads=${-numCpus}`],
];
for (const [input, expectedFlag] of tests) {
const tests = {
"0": "--threads=0",
"1": "--threads=1",
[`${numCpus + 1}`]: `--threads=${numCpus}`,
[`${-numCpus - 1}`]: `--threads=${-numCpus}`,
};
for (const [input, expectedFlag] of Object.entries(tests)) {
const flag = util.getThreadsFlag(input, logging_1.getRunnerLogger(true));
t.deepEqual(flag, expectedFlag);
}
@@ -70,7 +62,12 @@ ava_1.default("getThreadsFlag() should return the correct --threads flag", (t) =
ava_1.default("getThreadsFlag() throws if the threads input is not an integer", (t) => {
t.throws(() => util.getThreadsFlag("hello!", logging_1.getRunnerLogger(true)));
});
ava_1.default("getRef() throws on the empty string", (t) => {
process.env["GITHUB_REF"] = "";
t.throws(util.getRef);
});
ava_1.default("isLocalRun() runs correctly", (t) => {
const origLocalRun = process.env.CODEQL_LOCAL_RUN;
process.env.CODEQL_LOCAL_RUN = "";
t.assert(!util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = "false";
@@ -81,6 +78,24 @@ ava_1.default("isLocalRun() runs correctly", (t) => {
t.assert(util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = "hucairz";
t.assert(util.isLocalRun());
process.env.CODEQL_LOCAL_RUN = origLocalRun;
});
ava_1.default("prepareEnvironment() when a local run", (t) => {
const origLocalRun = process.env.CODEQL_LOCAL_RUN;
process.env.CODEQL_LOCAL_RUN = "false";
process.env.GITHUB_JOB = "YYY";
util.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, "YYY");
process.env.CODEQL_LOCAL_RUN = "true";
util.prepareLocalRunEnvironment();
// unchanged
t.deepEqual(process.env.GITHUB_JOB, "YYY");
process.env.GITHUB_JOB = "";
util.prepareLocalRunEnvironment();
// updated
t.deepEqual(process.env.GITHUB_JOB, "UNKNOWN-JOB");
process.env.CODEQL_LOCAL_RUN = origLocalRun;
});
ava_1.default("getExtraOptionsEnvParam() succeeds on valid JSON with invalid options (for now)", (t) => {
const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS;
@@ -102,106 +117,4 @@ ava_1.default("getExtraOptionsEnvParam() fails on invalid JSON", (t) => {
t.throws(util.getExtraOptionsEnvParam);
process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions;
});
ava_1.default("parseGitHubUrl", (t) => {
t.deepEqual(util.parseGitHubUrl("github.com"), "https://github.com");
t.deepEqual(util.parseGitHubUrl("https://github.com"), "https://github.com");
t.deepEqual(util.parseGitHubUrl("https://api.github.com"), "https://github.com");
t.deepEqual(util.parseGitHubUrl("https://github.com/foo/bar"), "https://github.com");
t.deepEqual(util.parseGitHubUrl("github.example.com"), "https://github.example.com/");
t.deepEqual(util.parseGitHubUrl("https://github.example.com"), "https://github.example.com/");
t.deepEqual(util.parseGitHubUrl("https://api.github.example.com"), "https://github.example.com/");
t.deepEqual(util.parseGitHubUrl("https://github.example.com/api/v3"), "https://github.example.com/");
t.deepEqual(util.parseGitHubUrl("https://github.example.com:1234"), "https://github.example.com:1234/");
t.deepEqual(util.parseGitHubUrl("https://api.github.example.com:1234"), "https://github.example.com:1234/");
t.deepEqual(util.parseGitHubUrl("https://github.example.com:1234/api/v3"), "https://github.example.com:1234/");
t.deepEqual(util.parseGitHubUrl("https://github.example.com/base/path"), "https://github.example.com/base/path/");
t.deepEqual(util.parseGitHubUrl("https://github.example.com/base/path/api/v3"), "https://github.example.com/base/path/");
t.throws(() => util.parseGitHubUrl(""), {
message: '"" is not a valid URL',
});
t.throws(() => util.parseGitHubUrl("ssh://github.com"), {
message: '"ssh://github.com" is not a http or https URL',
});
t.throws(() => util.parseGitHubUrl("http:///::::433"), {
message: '"http:///::::433" is not a valid URL',
});
});
ava_1.default("allowed API versions", async (t) => {
t.is(util.apiVersionInRange("1.33.0", "1.33", "2.0"), undefined);
t.is(util.apiVersionInRange("1.33.1", "1.33", "2.0"), undefined);
t.is(util.apiVersionInRange("1.34.0", "1.33", "2.0"), undefined);
t.is(util.apiVersionInRange("2.0.0", "1.33", "2.0"), undefined);
t.is(util.apiVersionInRange("2.0.1", "1.33", "2.0"), undefined);
t.is(util.apiVersionInRange("1.32.0", "1.33", "2.0"), util.DisallowedAPIVersionReason.ACTION_TOO_NEW);
t.is(util.apiVersionInRange("2.1.0", "1.33", "2.0"), util.DisallowedAPIVersionReason.ACTION_TOO_OLD);
});
function mockGetMetaVersionHeader(versionHeader) {
// Passing an auth token is required, so we just use a dummy value
const client = github.getOctokit("123");
const response = {
headers: {
"x-github-enterprise-version": versionHeader,
},
};
const spyGetContents = sinon_1.default
.stub(client.meta, "get")
.resolves(response);
sinon_1.default.stub(api, "getApiClient").value(() => client);
return spyGetContents;
}
ava_1.default("getGitHubVersion", async (t) => {
const v = await util.getGitHubVersion({
auth: "",
url: "https://github.com",
});
t.deepEqual(util.GitHubVariant.DOTCOM, v.type);
mockGetMetaVersionHeader("2.0");
const v2 = await util.getGitHubVersion({
auth: "",
url: "https://ghe.example.com",
});
t.deepEqual({ type: util.GitHubVariant.GHES, version: "2.0" }, v2);
mockGetMetaVersionHeader("GitHub AE");
const ghae = await util.getGitHubVersion({
auth: "",
url: "https://example.githubenterprise.com",
});
t.deepEqual({ type: util.GitHubVariant.GHAE }, ghae);
mockGetMetaVersionHeader(undefined);
const v3 = await util.getGitHubVersion({
auth: "",
url: "https://ghe.example.com",
});
t.deepEqual({ type: util.GitHubVariant.DOTCOM }, v3);
});
ava_1.default("getGitHubAuth", async (t) => {
const msgs = [];
const mockLogger = {
warning: (msg) => msgs.push(msg),
};
// eslint-disable-next-line @typescript-eslint/no-floating-promises
t.throwsAsync(async () => util.getGitHubAuth(mockLogger, "abc", true));
process.env.GITHUB_TOKEN = "123";
t.is("123", await util.getGitHubAuth(mockLogger, undefined, undefined));
t.is(msgs.length, 0);
t.is("abc", await util.getGitHubAuth(mockLogger, "abc", undefined));
t.is(msgs.length, 1); // warning expected
msgs.length = 0;
await mockStdInForAuth(t, mockLogger, "def", "def");
await mockStdInForAuth(t, mockLogger, "def", "", "def");
await mockStdInForAuth(t, mockLogger, "def", "def\n some extra garbage", "ghi");
await mockStdInForAuth(t, mockLogger, "defghi", "def", "ghi\n123");
await mockStdInForAuthExpectError(t, mockLogger, "");
await mockStdInForAuthExpectError(t, mockLogger, "", " ", "abc");
await mockStdInForAuthExpectError(t, mockLogger, " def\n some extra garbage", "ghi");
t.is(msgs.length, 0);
});
async function mockStdInForAuth(t, mockLogger, expected, ...text) {
const stdin = stream.Readable.from(text);
t.is(expected, await util.getGitHubAuth(mockLogger, undefined, true, stdin));
}
async function mockStdInForAuthExpectError(t, mockLogger, ...text) {
const stdin = stream.Readable.from(text);
await t.throwsAsync(async () => util.getGitHubAuth(mockLogger, undefined, true, stdin));
}
//# sourceMappingURL=util.test.js.map

File diff suppressed because one or more lines are too long

node_modules/.bin/uuid (generated, vendored, 2 lines changed)
View File

@@ -1 +1 @@
../uuid/dist/bin/uuid
../uuid/bin/uuid

node_modules/.bin/which (generated, vendored, symbolic link, 1 line changed)
View File

@@ -0,0 +1 @@
../which/bin/which

node_modules/.package-lock.json (generated, vendored, 5555 lines changed)

File diff suppressed because it is too large Load Diff

View File

@@ -82,14 +82,7 @@ try {
core.warning('myInput was not set');
}
if (core.isDebug()) {
// curl -v https://github.com
} else {
// curl https://github.com
}
// Do stuff
core.info('Output to the actions build log')
}
catch (err) {
core.error(`Error ${err}, action may still succeed though`);
@@ -144,4 +137,4 @@ const core = require('@actions/core');
var pid = core.getState("pidToKill");
process.kill(pid);
```
```

View File

@@ -1,16 +1,16 @@
interface CommandProperties {
[key: string]: any;
[key: string]: string;
}
/**
* Commands
*
* Command Format:
* ::name key=value,key=value::message
* ##[name key=value;key=value]message
*
* Examples:
* ::warning::This is the message
* ::set-env name=MY_VAR::some value
* ##[warning]This is the user warning message
* ##[set-secret name=mypassword]definitelyNotAPassword!
*/
export declare function issueCommand(command: string, properties: CommandProperties, message: any): void;
export declare function issueCommand(command: string, properties: CommandProperties, message: string): void;
export declare function issue(name: string, message?: string): void;
export {};

View File

@@ -1,23 +1,15 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const os = __importStar(require("os"));
const utils_1 = require("./utils");
const os = require("os");
/**
* Commands
*
* Command Format:
* ::name key=value,key=value::message
* ##[name key=value;key=value]message
*
* Examples:
* ::warning::This is the message
* ::set-env name=MY_VAR::some value
* ##[warning]This is the user warning message
* ##[set-secret name=mypassword]definitelyNotAPassword!
*/
function issueCommand(command, properties, message) {
const cmd = new Command(command, properties, message);
@@ -42,38 +34,33 @@ class Command {
let cmdStr = CMD_STRING + this.command;
if (this.properties && Object.keys(this.properties).length > 0) {
cmdStr += ' ';
let first = true;
for (const key in this.properties) {
if (this.properties.hasOwnProperty(key)) {
const val = this.properties[key];
if (val) {
if (first) {
first = false;
}
else {
cmdStr += ',';
}
cmdStr += `${key}=${escapeProperty(val)}`;
// safely append the val - avoid blowing up when attempting to
// call .replace() if message is not a string for some reason
cmdStr += `${key}=${escape(`${val || ''}`)},`;
}
}
}
}
cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
cmdStr += CMD_STRING;
// safely append the message - avoid blowing up when attempting to
// call .replace() if message is not a string for some reason
const message = `${this.message || ''}`;
cmdStr += escapeData(message);
return cmdStr;
}
}
function escapeData(s) {
return utils_1.toCommandValue(s)
.replace(/%/g, '%25')
.replace(/\r/g, '%0D')
.replace(/\n/g, '%0A');
return s.replace(/\r/g, '%0D').replace(/\n/g, '%0A');
}
function escapeProperty(s) {
return utils_1.toCommandValue(s)
.replace(/%/g, '%25')
function escape(s) {
return s
.replace(/\r/g, '%0D')
.replace(/\n/g, '%0A')
.replace(/:/g, '%3A')
.replace(/,/g, '%2C');
.replace(/]/g, '%5D')
.replace(/;/g, '%3B');
}
//# sourceMappingURL=command.js.map
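To make the newer command format above concrete, a hypothetical call and the line it writes to stdout (newlines in the message are escaped as %0A):

```js
// Hypothetical example of the "::name key=value::message" command format:
//   core.warning("line one\nline two")
// writes:
//   ::warning::line one%0Aline two
```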

View File

@@ -1 +1 @@
{"version":3,"file":"command.js","sourceRoot":"","sources":["../src/command.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAwB;AACxB,mCAAsC;AAWtC;;;;;;;;;GASG;AACH,SAAgB,YAAY,CAC1B,OAAe,EACf,UAA6B,EAC7B,OAAY;IAEZ,MAAM,GAAG,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,CAAC,CAAA;IACrD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AAC/C,CAAC;AAPD,oCAOC;AAED,SAAgB,KAAK,CAAC,IAAY,EAAE,UAAkB,EAAE;IACtD,YAAY,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACjC,CAAC;AAFD,sBAEC;AAED,MAAM,UAAU,GAAG,IAAI,CAAA;AAEvB,MAAM,OAAO;IAKX,YAAY,OAAe,EAAE,UAA6B,EAAE,OAAe;QACzE,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,iBAAiB,CAAA;SAC5B;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAED,QAAQ;QACN,IAAI,MAAM,GAAG,UAAU,GAAG,IAAI,CAAC,OAAO,CAAA;QAEtC,IAAI,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YAC9D,MAAM,IAAI,GAAG,CAAA;YACb,IAAI,KAAK,GAAG,IAAI,CAAA;YAChB,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,UAAU,EAAE;gBACjC,IAAI,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;oBACvC,MAAM,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;oBAChC,IAAI,GAAG,EAAE;wBACP,IAAI,KAAK,EAAE;4BACT,KAAK,GAAG,KAAK,CAAA;yBACd;6BAAM;4BACL,MAAM,IAAI,GAAG,CAAA;yBACd;wBAED,MAAM,IAAI,GAAG,GAAG,IAAI,cAAc,CAAC,GAAG,CAAC,EAAE,CAAA;qBAC1C;iBACF;aACF;SACF;QAED,MAAM,IAAI,GAAG,UAAU,GAAG,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAA;QACpD,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAED,SAAS,UAAU,CAAC,CAAM;IACxB,OAAO,sBAAc,CAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA;AAC1B,CAAC;AAED,SAAS,cAAc,CAAC,CAAM;IAC5B,OAAO,sBAAc,CAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;AACzB,CAAC"}
{"version":3,"file":"command.js","sourceRoot":"","sources":["../src/command.ts"],"names":[],"mappings":";;AAAA,yBAAwB;AAQxB;;;;;;;;;GASG;AACH,SAAgB,YAAY,CAC1B,OAAe,EACf,UAA6B,EAC7B,OAAe;IAEf,MAAM,GAAG,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,CAAC,CAAA;IACrD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AAC/C,CAAC;AAPD,oCAOC;AAED,SAAgB,KAAK,CAAC,IAAY,EAAE,UAAkB,EAAE;IACtD,YAAY,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACjC,CAAC;AAFD,sBAEC;AAED,MAAM,UAAU,GAAG,IAAI,CAAA;AAEvB,MAAM,OAAO;IAKX,YAAY,OAAe,EAAE,UAA6B,EAAE,OAAe;QACzE,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,iBAAiB,CAAA;SAC5B;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAED,QAAQ;QACN,IAAI,MAAM,GAAG,UAAU,GAAG,IAAI,CAAC,OAAO,CAAA;QAEtC,IAAI,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YAC9D,MAAM,IAAI,GAAG,CAAA;YACb,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,UAAU,EAAE;gBACjC,IAAI,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;oBACvC,MAAM,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;oBAChC,IAAI,GAAG,EAAE;wBACP,8DAA8D;wBAC9D,6DAA6D;wBAC7D,MAAM,IAAI,GAAG,GAAG,IAAI,MAAM,CAAC,GAAG,GAAG,IAAI,EAAE,EAAE,CAAC,GAAG,CAAA;qBAC9C;iBACF;aACF;SACF;QAED,MAAM,IAAI,UAAU,CAAA;QAEpB,kEAAkE;QAClE,6DAA6D;QAC7D,MAAM,OAAO,GAAG,GAAG,IAAI,CAAC,OAAO,IAAI,EAAE,EAAE,CAAA;QACvC,MAAM,IAAI,UAAU,CAAC,OAAO,CAAC,CAAA;QAE7B,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAED,SAAS,UAAU,CAAC,CAAS;IAC3B,OAAO,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA;AACtD,CAAC;AAED,SAAS,MAAM,CAAC,CAAS;IACvB,OAAO,CAAC;SACL,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;AACzB,CAAC"}

View File

@@ -21,9 +21,9 @@ export declare enum ExitCode {
/**
* Sets env variable for this action and future actions in the job
* @param name the name of the variable to set
* @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
* @param val the value of the variable
*/
export declare function exportVariable(name: string, val: any): void;
export declare function exportVariable(name: string, val: string): void;
/**
* Registers a secret which will get masked from logs
* @param secret value of the secret
@@ -46,25 +46,15 @@ export declare function getInput(name: string, options?: InputOptions): string;
* Sets the value of an output.
*
* @param name name of the output to set
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
* @param value value to store
*/
export declare function setOutput(name: string, value: any): void;
/**
* Enables or disables the echoing of commands into stdout for the rest of the step.
* Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
*
*/
export declare function setCommandEcho(enabled: boolean): void;
export declare function setOutput(name: string, value: string): void;
/**
* Sets the action status to failed.
* When the action exits it will be with an exit code of 1
* @param message add error issue message
*/
export declare function setFailed(message: string | Error): void;
/**
* Gets whether Actions Step Debug is on or not
*/
export declare function isDebug(): boolean;
export declare function setFailed(message: string): void;
/**
* Writes debug message to user log
* @param message debug message
@@ -72,14 +62,14 @@ export declare function isDebug(): boolean;
export declare function debug(message: string): void;
/**
* Adds an error issue
* @param message error issue message. Errors will be converted to string via toString()
* @param message error issue message
*/
export declare function error(message: string | Error): void;
export declare function error(message: string): void;
/**
* Adds an warning issue
* @param message warning issue message. Errors will be converted to string via toString()
* @param message warning issue message
*/
export declare function warning(message: string | Error): void;
export declare function warning(message: string): void;
/**
* Writes info to log with console.log.
* @param message info message
@@ -110,9 +100,9 @@ export declare function group<T>(name: string, fn: () => Promise<T>): Promise<T>
* Saves state for current action, the state can only be retrieved by this action's post job execution.
*
* @param name name of the state to store
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
* @param value value to store
*/
export declare function saveState(name: string, value: any): void;
export declare function saveState(name: string, value: string): void;
/**
* Gets the value of a state set by this action's main execution.
*
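The declaration changes above boil down to one practical difference: the 1.2.0 signatures accept only strings, so non-string values have to be serialized by the caller rather than by the library. A minimal sketch of the caller-side difference (hypothetical variable names, not part of this diff):

```js
const core = require('@actions/core');

const matrix = { os: ['ubuntu-latest', 'windows-latest'] };

// 1.2.0-style string-only API: serialize explicitly before passing the value.
core.setOutput('matrix', JSON.stringify(matrix));
core.saveState('matrix', JSON.stringify(matrix));

// The newer `any` overloads removed in this hunk would instead run
// JSON.stringify internally, so the bare object could be passed directly.
```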

View File

@@ -8,19 +8,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const command_1 = require("./command");
const file_command_1 = require("./file-command");
const utils_1 = require("./utils");
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const os = require("os");
const path = require("path");
/**
* The code to exit an action
*/
@@ -41,21 +32,11 @@ var ExitCode;
/**
* Sets env variable for this action and future actions in the job
* @param name the name of the variable to set
* @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
* @param val the value of the variable
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable(name, val) {
const convertedVal = utils_1.toCommandValue(val);
process.env[name] = convertedVal;
const filePath = process.env['GITHUB_ENV'] || '';
if (filePath) {
const delimiter = '_GitHubActionsFileCommandDelimeter_';
const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
file_command_1.issueCommand('ENV', commandValue);
}
else {
command_1.issueCommand('set-env', { name }, convertedVal);
}
process.env[name] = val;
command_1.issueCommand('set-env', { name }, val);
}
exports.exportVariable = exportVariable;
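For context, the branch removed here writes a heredoc-style entry to the file named by `GITHUB_ENV`, while the fallback (and the 1.2.0 code that replaces it) emits a `::set-env::` workflow command on stdout. A rough sketch of what each path produces, with made-up values:

```js
const core = require('@actions/core');

// File-command path (removed above): when GITHUB_ENV is set, the entry
// appended to that file looks roughly like:
//
//   MY_VAR<<_GitHubActionsFileCommandDelimeter_
//   some value
//   _GitHubActionsFileCommandDelimeter_
//
// Workflow-command path (1.2.0 behaviour): the value is echoed to stdout as
//
//   ::set-env name=MY_VAR::some value
//
core.exportVariable('MY_VAR', 'some value');
```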
/**
@@ -71,13 +52,7 @@ exports.setSecret = setSecret;
* @param inputPath
*/
function addPath(inputPath) {
const filePath = process.env['GITHUB_PATH'] || '';
if (filePath) {
file_command_1.issueCommand('PATH', inputPath);
}
else {
command_1.issueCommand('add-path', {}, inputPath);
}
command_1.issueCommand('add-path', {}, inputPath);
process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
@@ -100,22 +75,12 @@ exports.getInput = getInput;
* Sets the value of an output.
*
* @param name name of the output to set
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
* @param value value to store
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) {
command_1.issueCommand('set-output', { name }, value);
}
exports.setOutput = setOutput;
/**
* Enables or disables the echoing of commands into stdout for the rest of the step.
* Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
*
*/
function setCommandEcho(enabled) {
command_1.issue('echo', enabled ? 'on' : 'off');
}
exports.setCommandEcho = setCommandEcho;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
@@ -132,13 +97,6 @@ exports.setFailed = setFailed;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/**
* Gets whether Actions Step Debug is on or not
*/
function isDebug() {
return process.env['RUNNER_DEBUG'] === '1';
}
exports.isDebug = isDebug;
/**
* Writes debug message to user log
* @param message debug message
@@ -149,18 +107,18 @@ function debug(message) {
exports.debug = debug;
/**
* Adds an error issue
* @param message error issue message. Errors will be converted to string via toString()
* @param message error issue message
*/
function error(message) {
command_1.issue('error', message instanceof Error ? message.toString() : message);
command_1.issue('error', message);
}
exports.error = error;
/**
* Adds a warning issue
* @param message warning issue message. Errors will be converted to string via toString()
* @param message warning issue message
*/
function warning(message) {
command_1.issue('warning', message instanceof Error ? message.toString() : message);
command_1.issue('warning', message);
}
exports.warning = warning;
/**
@@ -218,9 +176,8 @@ exports.group = group;
* Saves state for current action, the state can only be retrieved by this action's post job execution.
*
* @param name name of the state to store
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
* @param value value to store
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
command_1.issueCommand('save-state', { name }, value);
}
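The save-state command shown here pairs with `getState`: state written during the main entry point is only readable from the same action's post step. A hypothetical main/post pair, for illustration only:

```js
// main.js (main entry point of the action)
const core = require('@actions/core');
core.saveState('tracerPid', String(process.pid));

// post.js (post entry point of the same action)
// const pid = core.getState('tracerPid'); // returns the value saved above, or ''
```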

View File

@@ -1 +1 @@
{"version":3,"file":"core.js","sourceRoot":"","sources":["../src/core.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,uCAA6C;AAC7C,iDAA+D;AAC/D,mCAAsC;AAEtC,uCAAwB;AACxB,2CAA4B;AAU5B;;GAEG;AACH,IAAY,QAUX;AAVD,WAAY,QAAQ;IAClB;;OAEG;IACH,6CAAW,CAAA;IAEX;;OAEG;IACH,6CAAW,CAAA;AACb,CAAC,EAVW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAUnB;AAED,yEAAyE;AACzE,YAAY;AACZ,yEAAyE;AAEzE;;;;GAIG;AACH,8DAA8D;AAC9D,SAAgB,cAAc,CAAC,IAAY,EAAE,GAAQ;IACnD,MAAM,YAAY,GAAG,sBAAc,CAAC,GAAG,CAAC,CAAA;IACxC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,YAAY,CAAA;IAEhC,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,EAAE,CAAA;IAChD,IAAI,QAAQ,EAAE;QACZ,MAAM,SAAS,GAAG,qCAAqC,CAAA;QACvD,MAAM,YAAY,GAAG,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE,CAAC,GAAG,GAAG,YAAY,GAAG,EAAE,CAAC,GAAG,GAAG,SAAS,EAAE,CAAA;QACzF,2BAAgB,CAAC,KAAK,EAAE,YAAY,CAAC,CAAA;KACtC;SAAM;QACL,sBAAY,CAAC,SAAS,EAAE,EAAC,IAAI,EAAC,EAAE,YAAY,CAAC,CAAA;KAC9C;AACH,CAAC;AAZD,wCAYC;AAED;;;GAGG;AACH,SAAgB,SAAS,CAAC,MAAc;IACtC,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,MAAM,CAAC,CAAA;AACtC,CAAC;AAFD,8BAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,SAAiB;IACvC,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;IACjD,IAAI,QAAQ,EAAE;QACZ,2BAAgB,CAAC,MAAM,EAAE,SAAS,CAAC,CAAA;KACpC;SAAM;QACL,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,SAAS,CAAC,CAAA;KACxC;IACD,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,GAAG,SAAS,GAAG,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAA;AAC7E,CAAC;AARD,0BAQC;AAED;;;;;;GAMG;AACH,SAAgB,QAAQ,CAAC,IAAY,EAAE,OAAsB;IAC3D,MAAM,GAAG,GACP,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,WAAW,EAAE,EAAE,CAAC,IAAI,EAAE,CAAA;IACrE,IAAI,OAAO,IAAI,OAAO,CAAC,QAAQ,IAAI,CAAC,GAAG,EAAE;QACvC,MAAM,IAAI,KAAK,CAAC,oCAAoC,IAAI,EAAE,CAAC,CAAA;KAC5D;IAED,OAAO,GAAG,CAAC,IAAI,EAAE,CAAA;AACnB,CAAC;AARD,4BAQC;AAED;;;;;GAKG;AACH,8DAA8D;AAC9D,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAU;IAChD,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,KAAK,CAAC,CAAA;AAC3C,CAAC;AAFD,8BAEC;AAED;;;;GAIG;AACH,SAAgB,cAAc,CAAC,OAAgB;IAC7C,eAAK,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAA;AACvC,CAAC;AAFD,wCAEC;AAED,yEAAyE;AACzE,UAAU;AACV,yEAAyE;AAEzE;;;;GAIG;AACH,SAAgB,SAAS,CAAC,OAAuB;IAC/C,OAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAA;IAEnC,KAAK,CAAC,OAAO,CAAC,CAAA;AAChB,CAAC;AAJD,8BAIC;AAED,yEAAyE;AACzE,mBAAmB;AACnB,yEAAyE;AAEzE;;GAEG;AACH,SAAgB,OAAO;IACrB,OAAO,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,GAAG,CAAA;AAC5C,CAAC;AAFD,0BAEC;AAED;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAe;IACnC,sBAAY,CAAC,OAAO,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACpC,CAAC;AAFD,sBAEC;AAED;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAuB;IAC3C,eAAK,CAAC,OAAO,EAAE,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAA;AACzE,CAAC;AAFD,sBAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,OAAuB;IAC7C,eAAK,CAAC,SAAS,EAAE,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAA;AAC3E,CAAC;AAFD,0BAEC;AAED;;;GAGG;AACH,SAAgB,IAAI,CAAC,OAAe;IAClC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AACxC,CAAC;AAFD,oBAEC;AAED;;;;;;GAMG;AACH,SAAgB,UAAU,CAAC,IAAY;IACrC,eAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAA;AACtB,CAAC;AAFD,gCAEC;AAED;;GAEG;AACH,SAAgB,QAAQ;IACtB,eAAK,CAAC,UAAU,CAAC,CAAA;AACnB,CAAC;AAFD,4BAEC;AAED;;;;;;;GAOG;AACH,SAAsB,KAAK,CAAI,IAAY,EAAE,EAAoB;;QAC/D,UAAU,CAAC,IAAI,CAAC,CAAA;QAEhB,IAAI,MAAS,CAAA;QAEb,IAAI;YACF,MAAM,GAAG,MAAM,EAAE,EAAE,CAAA;SACpB;gBAAS;YACR,QAAQ,EAAE,CAAA;SACX;QAED,OAAO,MAAM,CAAA;IACf,CAAC;CAAA;AAZD,sBAYC;AAED,yEAAyE;AACzE,uBAAuB;AACvB,yEAAyE;AAEzE;;;;;GAKG;AACH,8DAA8D;AAC9D,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAU;IAChD,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,KAAK,CAAC,CAAA;AAC3C,CAAC;AAFD,8BAEC;AAED;;;;;GAKG;AACH,SAAgB,QAAQ,CAAC,IA
AY;IACnC,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,EAAE,CAAC,IAAI,EAAE,CAAA;AAC3C,CAAC;AAFD,4BAEC"}
{"version":3,"file":"core.js","sourceRoot":"","sources":["../src/core.ts"],"names":[],"mappings":";;;;;;;;;;;AAAA,uCAA6C;AAE7C,yBAAwB;AACxB,6BAA4B;AAU5B;;GAEG;AACH,IAAY,QAUX;AAVD,WAAY,QAAQ;IAClB;;OAEG;IACH,6CAAW,CAAA;IAEX;;OAEG;IACH,6CAAW,CAAA;AACb,CAAC,EAVW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAUnB;AAED,yEAAyE;AACzE,YAAY;AACZ,yEAAyE;AAEzE;;;;GAIG;AACH,SAAgB,cAAc,CAAC,IAAY,EAAE,GAAW;IACtD,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,GAAG,CAAA;IACvB,sBAAY,CAAC,SAAS,EAAE,EAAC,IAAI,EAAC,EAAE,GAAG,CAAC,CAAA;AACtC,CAAC;AAHD,wCAGC;AAED;;;GAGG;AACH,SAAgB,SAAS,CAAC,MAAc;IACtC,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,MAAM,CAAC,CAAA;AACtC,CAAC;AAFD,8BAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,SAAiB;IACvC,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,SAAS,CAAC,CAAA;IACvC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,GAAG,SAAS,GAAG,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAA;AAC7E,CAAC;AAHD,0BAGC;AAED;;;;;;GAMG;AACH,SAAgB,QAAQ,CAAC,IAAY,EAAE,OAAsB;IAC3D,MAAM,GAAG,GACP,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,WAAW,EAAE,EAAE,CAAC,IAAI,EAAE,CAAA;IACrE,IAAI,OAAO,IAAI,OAAO,CAAC,QAAQ,IAAI,CAAC,GAAG,EAAE;QACvC,MAAM,IAAI,KAAK,CAAC,oCAAoC,IAAI,EAAE,CAAC,CAAA;KAC5D;IAED,OAAO,GAAG,CAAC,IAAI,EAAE,CAAA;AACnB,CAAC;AARD,4BAQC;AAED;;;;;GAKG;AACH,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAa;IACnD,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,KAAK,CAAC,CAAA;AAC3C,CAAC;AAFD,8BAEC;AAED,yEAAyE;AACzE,UAAU;AACV,yEAAyE;AAEzE;;;;GAIG;AACH,SAAgB,SAAS,CAAC,OAAe;IACvC,OAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAA;IACnC,KAAK,CAAC,OAAO,CAAC,CAAA;AAChB,CAAC;AAHD,8BAGC;AAED,yEAAyE;AACzE,mBAAmB;AACnB,yEAAyE;AAEzE;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAe;IACnC,sBAAY,CAAC,OAAO,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACpC,CAAC;AAFD,sBAEC;AAED;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAe;IACnC,eAAK,CAAC,OAAO,EAAE,OAAO,CAAC,CAAA;AACzB,CAAC;AAFD,sBAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,OAAe;IACrC,eAAK,CAAC,SAAS,EAAE,OAAO,CAAC,CAAA;AAC3B,CAAC;AAFD,0BAEC;AAED;;;GAGG;AACH,SAAgB,IAAI,CAAC,OAAe;IAClC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AACxC,CAAC;AAFD,oBAEC;AAED;;;;;;GAMG;AACH,SAAgB,UAAU,CAAC,IAAY;IACrC,eAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAA;AACtB,CAAC;AAFD,gCAEC;AAED;;GAEG;AACH,SAAgB,QAAQ;IACtB,eAAK,CAAC,UAAU,CAAC,CAAA;AACnB,CAAC;AAFD,4BAEC;AAED;;;;;;;GAOG;AACH,SAAsB,KAAK,CAAI,IAAY,EAAE,EAAoB;;QAC/D,UAAU,CAAC,IAAI,CAAC,CAAA;QAEhB,IAAI,MAAS,CAAA;QAEb,IAAI;YACF,MAAM,GAAG,MAAM,EAAE,EAAE,CAAA;SACpB;gBAAS;YACR,QAAQ,EAAE,CAAA;SACX;QAED,OAAO,MAAM,CAAA;IACf,CAAC;CAAA;AAZD,sBAYC;AAED,yEAAyE;AACzE,uBAAuB;AACvB,yEAAyE;AAEzE;;;;;GAKG;AACH,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAa;IACnD,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,KAAK,CAAC,CAAA;AAC3C,CAAC;AAFD,8BAEC;AAED;;;;;GAKG;AACH,SAAgB,QAAQ,CAAC,IAAY;IACnC,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,EAAE,CAAC,IAAI,EAAE,CAAA;AAC3C,CAAC;AAFD,4BAEC"}

View File

@@ -1 +0,0 @@
export declare function issueCommand(command: string, message: any): void;

View File

@@ -1,29 +0,0 @@
"use strict";
// For internal use, subject to change.
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const utils_1 = require("./utils");
function issueCommand(command, message) {
const filePath = process.env[`GITHUB_${command}`];
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
encoding: 'utf8'
});
}
exports.issueCommand = issueCommand;
//# sourceMappingURL=file-command.js.map
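The helper deleted here is generic over the command name: it looks up `GITHUB_<COMMAND>` in the environment and appends the sanitized value to that file. A sketch of direct use (internal API, and the path is a made-up assumption):

```js
const { issueCommand } = require('@actions/core/lib/file-command');

// Assuming the runner exported GITHUB_PATH=/tmp/github_path (hypothetical),
// this appends "/opt/mytool/bin" plus a newline to that file; the runner then
// prepends the directory to PATH for subsequent steps.
issueCommand('PATH', '/opt/mytool/bin');
```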

View File

@@ -1 +0,0 @@
{"version":3,"file":"file-command.js","sourceRoot":"","sources":["../src/file-command.ts"],"names":[],"mappings":";AAAA,uCAAuC;;;;;;;;;AAEvC,mCAAmC;AACnC,uDAAuD;AAEvD,uCAAwB;AACxB,uCAAwB;AACxB,mCAAsC;AAEtC,SAAgB,YAAY,CAAC,OAAe,EAAE,OAAY;IACxD,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,OAAO,EAAE,CAAC,CAAA;IACjD,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,IAAI,KAAK,CACb,wDAAwD,OAAO,EAAE,CAClE,CAAA;KACF;IACD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;QAC5B,MAAM,IAAI,KAAK,CAAC,yBAAyB,QAAQ,EAAE,CAAC,CAAA;KACrD;IAED,EAAE,CAAC,cAAc,CAAC,QAAQ,EAAE,GAAG,sBAAc,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,EAAE;QACjE,QAAQ,EAAE,MAAM;KACjB,CAAC,CAAA;AACJ,CAAC;AAdD,oCAcC"}

View File

@@ -1,5 +0,0 @@
/**
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @param input input to sanitize into a string
*/
export declare function toCommandValue(input: any): string;

View File

@@ -1,19 +0,0 @@
"use strict";
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", { value: true });
/**
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @param input input to sanitize into a string
*/
function toCommandValue(input) {
if (input === null || input === undefined) {
return '';
}
else if (typeof input === 'string' || input instanceof String) {
return input;
}
return JSON.stringify(input);
}
exports.toCommandValue = toCommandValue;
//# sourceMappingURL=utils.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":";AAAA,mCAAmC;AACnC,uDAAuD;;AAEvD;;;GAGG;AACH,SAAgB,cAAc,CAAC,KAAU;IACvC,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,OAAO,EAAE,CAAA;KACV;SAAM,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,YAAY,MAAM,EAAE;QAC/D,OAAO,KAAe,CAAA;KACvB;IACD,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAA;AAC9B,CAAC;AAPD,wCAOC"}

View File

@@ -1,23 +1,21 @@
{
"name": "@actions/core",
"version": "1.2.6",
"version": "1.2.0",
"description": "Actions core lib",
"keywords": [
"github",
"actions",
"core"
],
"homepage": "https://github.com/actions/toolkit/tree/main/packages/core",
"homepage": "https://github.com/actions/toolkit/tree/master/packages/core",
"license": "MIT",
"main": "lib/core.js",
"types": "lib/core.d.ts",
"directories": {
"lib": "lib",
"test": "__tests__"
},
"files": [
"lib",
"!.DS_Store"
"lib"
],
"publishConfig": {
"access": "public"
@@ -28,7 +26,6 @@
"directory": "packages/core"
},
"scripts": {
"audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json",
"test": "echo \"Error: run tests from root\" && exit 1",
"tsc": "tsc"
},

View File

@@ -4,7 +4,7 @@
## Usage
Returns an authenticated Octokit client that follows the machine [proxy settings](https://help.github.com/en/actions/hosting-your-own-runners/using-a-proxy-server-with-self-hosted-runners) and correctly sets GHES base urls. See https://octokit.github.io/rest.js for the API.
Returns an authenticated Octokit client that follows the machine [proxy settings](https://help.github.com/en/actions/hosting-your-own-runners/using-a-proxy-server-with-self-hosted-runners). See https://octokit.github.io/rest.js for the API.
```js
const github = require('@actions/github');
@@ -17,10 +17,7 @@ async function run() {
// https://help.github.com/en/actions/automating-your-workflow-with-github-actions/authenticating-with-the-github_token#about-the-github_token-secret
const myToken = core.getInput('myToken');
const octokit = github.getOctokit(myToken)
// You can also pass in additional options as a second parameter to getOctokit
// const octokit = github.getOctokit(myToken, {userAgent: "MyActionVersion1"});
const octokit = new github.GitHub(myToken);
const { data: pullRequest } = await octokit.pulls.get({
owner: 'octokit',
@@ -37,6 +34,8 @@ async function run() {
run();
```
You can pass client options, as specified by [Octokit](https://octokit.github.io/rest.js/), as a second argument to the `GitHub` constructor.
You can also make GraphQL requests. See https://github.com/octokit/graphql.js for the API.
```js
@@ -73,25 +72,3 @@ if (github.context.eventName === 'push') {
core.info(`The head commit is: ${pushPayload.head}`)
}
```
## Extending the Octokit instance
`@octokit/core` now supports the [plugin architecture](https://github.com/octokit/core.js#plugins). You can extend the GitHub instance using plugins.
For example, using the `@octokit/plugin-enterprise-server` you can now access enterprise admin apis on GHES instances.
```ts
import { GitHub, getOctokitOptions } from '@actions/github/lib/utils'
import { enterpriseServer220Admin } from '@octokit/plugin-enterprise-server'
const octokit = GitHub.plugin(enterpriseServer220Admin)
// or override some of the default values as well
// const octokit = GitHub.plugin(enterpriseServer220Admin).defaults({userAgent: "MyNewUserAgent"})
const myToken = core.getInput('myToken');
const myOctokit = new octokit(getOctokitOptions(token))
// Create a new user
myOctokit.enterpriseAdmin.createUser({
login: "testuser",
email: "testuser@test.com",
});
```
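The GraphQL usage mentioned earlier in this README diff is truncated by the hunk boundary; a minimal sketch of such a call against the older `GitHub` constructor shown above (query, owner, and repo values are placeholders):

```js
const github = require('@actions/github');

async function countOpenIssues(token) {
  const octokit = new github.GitHub(token);
  const result = await octokit.graphql(
    `query ($owner: String!, $repo: String!) {
       repository(owner: $owner, name: $repo) {
         issues(states: OPEN) { totalCount }
       }
     }`,
    { owner: 'octokit', repo: 'graphql.js' }
  );
  return result.repository.issues.totalCount;
}
```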

Some files were not shown because too many files have changed in this diff.