Compare commits


1 Commit

Author               SHA1        Message                        Date
github-actions[bot]  2e7423838d  Update changelog for v3.28.7   2025-01-29 08:16:30 +00:00
7781 changed files with 666936 additions and 654239 deletions

View File

@@ -61,12 +61,11 @@ runs:
- name: Check config
working-directory: ${{ github.action_path }}
shell: bash
env:
EXPECTED_CONFIG_FILE_CONTENTS: '${{ inputs.expected-config-file-contents }}'
run: ts-node ./index.ts "$RUNNER_TEMP/user-config.yaml" "$EXPECTED_CONFIG_FILE_CONTENTS"
run: ts-node ./index.ts "${{ runner.temp }}/user-config.yaml" '${{ inputs.expected-config-file-contents }}'
- name: Clean up
shell: bash
if: always()
run: |
rm -rf $RUNNER_TEMP/codescanning-config-cli-test
rm -rf $RUNNER_TEMP/user-config.yaml
rm -rf ${{ runner.temp }}/codescanning-config-cli-test
rm -rf ${{ runner.temp }}/user-config.yaml

View File

@@ -8,7 +8,7 @@ const actualConfig = loadActualConfig()
const rawExpectedConfig = process.argv[3].trim()
if (!rawExpectedConfig) {
core.setFailed('No expected configuration provided')
core.info('No expected configuration provided')
} else {
core.startGroup('Expected generated user config')
core.info(yaml.dump(JSON.parse(rawExpectedConfig)))
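For context, the script edited here reads the generated user config from disk and compares it against an expected value passed on the command line. Below is a minimal sketch of that pattern, assuming hypothetical file names and argument order rather than the repository's actual index.ts; it would be invoked roughly like the `ts-node ./index.ts <config-path> <expected-json>` run step above.

```typescript
// check-config.ts — minimal sketch; file layout and argument order are assumptions.
import * as fs from "fs";
import * as core from "@actions/core";
import * as yaml from "js-yaml";

const actualConfigPath = process.argv[2];                 // e.g. "$RUNNER_TEMP/user-config.yaml"
const rawExpectedConfig = (process.argv[3] ?? "").trim(); // expected config as a JSON string

if (!actualConfigPath) {
  core.setFailed("No config file path provided");
} else if (!rawExpectedConfig) {
  core.setFailed("No expected configuration provided");
} else {
  const expected = JSON.parse(rawExpectedConfig);
  const actual = yaml.load(fs.readFileSync(actualConfigPath, "utf8"));

  core.startGroup("Expected generated user config");
  core.info(yaml.dump(expected));
  core.endGroup();

  // Naive structural comparison; sufficient for a smoke test.
  if (JSON.stringify(actual) !== JSON.stringify(expected)) {
    core.setFailed("Generated config does not match the expected contents");
  }
}
```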

View File

@@ -29,27 +29,24 @@ runs:
- id: get-url
name: Determine URL
shell: bash
env:
VERSION: ${{ inputs.version }}
USE_ALL_PLATFORM_BUNDLE: ${{ inputs.use-all-platform-bundle }}
run: |
set -e # Fail this Action if `gh release list` fails.
if [[ "$VERSION" == "linked" ]]; then
if [[ ${{ inputs.version }} == "linked" ]]; then
echo "tools-url=linked" >> "$GITHUB_OUTPUT"
exit 0
elif [[ "$VERSION" == "default" ]]; then
elif [[ ${{ inputs.version }} == "default" ]]; then
echo "tools-url=" >> "$GITHUB_OUTPUT"
exit 0
fi
if [[ "$VERSION" == "nightly-latest" && "$RUNNER_OS" != "Windows" ]]; then
if [[ ${{ inputs.version }} == "nightly-latest" && "$RUNNER_OS" != "Windows" ]]; then
extension="tar.zst"
else
extension="tar.gz"
fi
if [[ "$USE_ALL_PLATFORM_BUNDLE" == "true" ]]; then
if [[ ${{ inputs.use-all-platform-bundle }} == "true" ]]; then
artifact_name="codeql-bundle.$extension"
elif [[ "$RUNNER_OS" == "Linux" ]]; then
artifact_name="codeql-bundle-linux64.$extension"
@@ -62,14 +59,14 @@ runs:
exit 1
fi
if [[ "$VERSION" == "nightly-latest" ]]; then
if [[ ${{ inputs.version }} == "nightly-latest" ]]; then
tag=`gh release list --repo dsp-testing/codeql-cli-nightlies -L 1 | cut -f 3`
echo "tools-url=https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$tag/$artifact_name" >> $GITHUB_OUTPUT
elif [[ "$VERSION" == *"nightly"* ]]; then
version=`echo "$VERSION" | sed -e 's/^.*\-//'`
elif [[ ${{ inputs.version }} == *"nightly"* ]]; then
version=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
echo "tools-url=https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$version/$artifact_name" >> $GITHUB_OUTPUT
elif [[ "$VERSION" == *"stable"* ]]; then
version=`echo "$VERSION" | sed -e 's/^.*\-//'`
elif [[ ${{ inputs.version }} == *"stable"* ]]; then
version=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
echo "tools-url=https://github.com/github/codeql-action/releases/download/codeql-bundle-$version/$artifact_name" >> $GITHUB_OUTPUT
else
echo "::error::Unrecognized version specified!"

View File

@@ -18,11 +18,8 @@ runs:
using: "composite"
steps:
- id: branches
env:
MAJOR_VERSION: ${{ inputs.major_version }}
LATEST_TAG: ${{ inputs.latest_tag }}
run: |
python ${{ github.action_path }}/release-branches.py \
--major-version "$MAJOR_VERSION" \
--latest-tag "$LATEST_TAG"
--major-version ${{ inputs.major_version }} \
--latest-tag ${{ inputs.latest_tag }}
shell: bash

View File

@@ -1,4 +0,0 @@
# Configuration for the CodeQL Actions Queries
name: "CodeQL Actions Queries config"
queries:
- uses: security-and-quality

View File

@@ -77,7 +77,7 @@ jobs:
setup-kotlin: 'true'
- uses: actions/setup-go@v5
with:
go-version: ~1.24.0
go-version: ~1.23.0
# to avoid potentially misleading autobuilder results where we expect it to download
# dependencies successfully, but they actually come from a warm cache
cache: false

View File

@@ -77,7 +77,7 @@ jobs:
setup-kotlin: 'true'
- uses: actions/setup-go@v5
with:
go-version: ~1.24.0
go-version: ~1.23.0
# to avoid potentially misleading autobuilder results where we expect it to download
# dependencies successfully, but they actually come from a warm cache
cache: false

View File

@@ -77,7 +77,7 @@ jobs:
setup-kotlin: 'true'
- uses: actions/setup-go@v5
with:
go-version: ~1.24.0
go-version: ~1.23.0
# to avoid potentially misleading autobuilder results where we expect it to download
# dependencies successfully, but they actually come from a warm cache
cache: false

View File

@@ -46,7 +46,7 @@ jobs:
use-all-platform-bundle: 'false'
setup-kotlin: 'true'
- name: Set up Ruby
uses: ruby/setup-ruby@354a1ad156761f5ee2b7b13fa8e09943a5e8d252 # v1.229.0
uses: ruby/setup-ruby@v1
with:
ruby-version: 2.6
- name: Install Code Scanning integration

.github/workflows/__rust.yml generated vendored
View File

@@ -1,71 +0,0 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.
name: PR Check - Rust analysis
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v*
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
schedule:
- cron: '0 5 * * *'
workflow_dispatch: {}
jobs:
rust:
strategy:
fail-fast: false
matrix:
include:
- os: ubuntu-latest
version: linked
- os: ubuntu-latest
version: default
- os: ubuntu-latest
version: nightly-latest
name: Rust analysis
permissions:
contents: read
security-events: read
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v4
- name: Prepare test
id: prepare-test
uses: ./.github/actions/prepare-test
with:
version: ${{ matrix.version }}
use-all-platform-bundle: 'false'
setup-kotlin: 'true'
- uses: ./../action/init
with:
languages: rust
tools: ${{ steps.prepare-test.outputs.tools-url }}
env:
CODEQL_ACTION_RUST_ANALYSIS: true
- uses: ./../action/analyze
id: analysis
with:
upload-database: false
- name: Check database
shell: bash
run: |
RUST_DB="${{ fromJson(steps.analysis.outputs.db-locations).rust }}"
if [[ ! -d "$RUST_DB" ]]; then
echo "Did not create a database for Rust."
exit 1
fi
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -70,18 +70,18 @@ jobs:
echo "Suggested matrix config for analysis job: $VERSIONS_JSON"
echo "versions=${VERSIONS_JSON}" >> $GITHUB_OUTPUT
analyze-javascript:
build:
needs: [check-codeql-versions]
strategy:
fail-fast: false
matrix:
os: [ubuntu-22.04,ubuntu-24.04,windows-2019,windows-2022,macos-13,macos-14]
os: [ubuntu-20.04,ubuntu-22.04,windows-2019,windows-2022,macos-13,macos-14]
tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
runs-on: ${{ matrix.os }}
permissions:
contents: read
security-events: write
security-events: write # needed to upload results
steps:
- name: Checkout
@@ -100,27 +100,3 @@ jobs:
uses: ./analyze
with:
category: "/language:javascript"
analyze-actions:
runs-on: ubuntu-latest
strategy:
fail-fast: false
permissions:
contents: read
security-events: write
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Initialize CodeQL
uses: ./init
with:
languages: actions
config-file: ./.github/codeql/codeql-actions-config.yml
- name: Perform CodeQL Analysis
uses: ./analyze
with:
category: "/language:actions"

View File

@@ -168,7 +168,7 @@ jobs:
--draft
- name: Generate token
uses: actions/create-github-app-token@v2.0.2
uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755
id: app-token
with:
app-id: ${{ vars.AUTOMATION_APP_ID }}

View File

@@ -124,7 +124,7 @@ jobs:
pull-requests: write # needed to create pull request
steps:
- name: Generate token
uses: actions/create-github-app-token@v2.0.2
uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755
id: app-token
with:
app-id: ${{ vars.AUTOMATION_APP_ID }}

View File

@@ -19,14 +19,14 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: "3.13"
python-version: "3.7"
- name: Checkout CodeQL Action
uses: actions/checkout@v4
- name: Checkout Enterprise Releases
uses: actions/checkout@v4
with:
repository: github/enterprise-releases
token: ${{ secrets.ENTERPRISE_RELEASE_TOKEN }}
ssh-key: ${{ secrets.ENTERPRISE_RELEASES_SSH_KEY }}
path: ${{ github.workspace }}/enterprise-releases/
- name: Update Supported Enterprise Server Versions
run: |

View File

@@ -1,20 +1,20 @@
repos:
- repo: local
hooks:
- id: lint-ts
name: Lint typescript code
files: \.ts$
language: system
entry: npm run lint -- --fix
- id: compile-ts
name: Compile typescript
files: \.[tj]s$
language: system
entry: npm run build
pass_filenames: false
- id: lint-ts
name: Lint typescript code
files: \.ts$
language: system
entry: npm run lint -- --fix
- id: pr-checks-sync
name: Synchronize PR check workflows
files: ^.github/workflows/__.*\.yml$|^pr-checks
language: system
entry: pr-checks/sync.sh
entry: python3 pr-checks/sync.py
pass_filenames: false

View File

@@ -2,48 +2,6 @@
See the [releases page](https://github.com/github/codeql-action/releases) for the relevant changes to the CodeQL CLI and language packs.
## [UNRELEASED]
No user facing changes.
## 3.28.16 - 23 Apr 2025
- Update default CodeQL bundle version to 2.21.1. [#2863](https://github.com/github/codeql-action/pull/2863)
## 3.28.15 - 07 Apr 2025
- Fix bug where the action would fail if it tried to produce a debug artifact with more than 65535 files. [#2842](https://github.com/github/codeql-action/pull/2842)
## 3.28.14 - 07 Apr 2025
- Update default CodeQL bundle version to 2.21.0. [#2838](https://github.com/github/codeql-action/pull/2838)
## 3.28.13 - 24 Mar 2025
No user facing changes.
## 3.28.12 - 19 Mar 2025
- Dependency caching should now cache more dependencies for Java `build-mode: none` extractions. This should speed up workflows and avoid inconsistent alerts in some cases.
- Update default CodeQL bundle version to 2.20.7. [#2810](https://github.com/github/codeql-action/pull/2810)
## 3.28.11 - 07 Mar 2025
- Update default CodeQL bundle version to 2.20.6. [#2793](https://github.com/github/codeql-action/pull/2793)
## 3.28.10 - 21 Feb 2025
- Update default CodeQL bundle version to 2.20.5. [#2772](https://github.com/github/codeql-action/pull/2772)
- Address an issue where the CodeQL Bundle would occasionally fail to decompress on macOS. [#2768](https://github.com/github/codeql-action/pull/2768)
## 3.28.9 - 07 Feb 2025
- Update default CodeQL bundle version to 2.20.4. [#2753](https://github.com/github/codeql-action/pull/2753)
## 3.28.8 - 29 Jan 2025
- Enable support for Kotlin 2.1.10 when running with CodeQL CLI v2.20.3. [#2744](https://github.com/github/codeql-action/pull/2744)
## 3.28.7 - 29 Jan 2025
No user facing changes.

View File

@@ -1,30 +0,0 @@
# Perform all working copy cleanup operations
all: lint sync
# Lint source typescript
lint:
npm run lint-fix
# Sync generated files (javascript and PR checks)
sync: build update-pr-checks
# Perform all necessary steps to update the PR checks
update-pr-checks:
pr-checks/sync.sh
# Transpile typescript code into javascript
build:
npm run build
# Build then run all the tests
test: build
npm run test
# Run the tests for a single file
test_file filename: build
npx ava --verbose {{filename}}
[doc("Refresh the .js build artefacts in the lib directory")]
[confirm]
refresh-lib:
rm -rf lib && npm run build

View File

@@ -38,14 +38,12 @@ Object.defineProperty(exports, "__esModule", { value: true });
* It will run after all the steps in this job, in reverse order in relation to
* other `post:` hooks.
*/
const fs = __importStar(require("fs"));
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const api_client_1 = require("./api-client");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const debugArtifacts = __importStar(require("./debug-artifacts"));
const dependency_caching_1 = require("./dependency-caching");
const environment_1 = require("./environment");
const logging_1 = require("./logging");
const util_1 = require("./util");
@@ -62,19 +60,7 @@ async function runWrapper() {
if (config !== undefined) {
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
const version = await codeql.getVersion();
await debugArtifacts.uploadCombinedSarifArtifacts(logger, config.gitHubVersion.type, version.version);
}
}
// If we analysed Java in build-mode: none, we may have downloaded dependencies
// to the temp directory. Clean these up so they don't persist unnecessarily
// long on self-hosted runners.
const javaTempDependencyDir = (0, dependency_caching_1.getJavaTempDependencyDir)();
if (fs.existsSync(javaTempDependencyDir)) {
try {
fs.rmSync(javaTempDependencyDir, { recursive: true });
}
catch (error) {
logger.info(`Failed to remove temporary Java dependencies directory: ${(0, util_1.getErrorMessage)(error)}`);
await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", () => debugArtifacts.uploadCombinedSarifArtifacts(logger, config.gitHubVersion.type, version.version));
}
}
}

View File

@@ -1 +1 @@
{"version":3,"file":"analyze-action-post.js","sourceRoot":"","sources":["../src/analyze-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,uCAAyB;AAEzB,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,qCAAqC;AACrC,iDAA2C;AAC3C,kEAAoD;AACpD,6DAAgE;AAChE,+CAAuC;AACvC,uCAA6C;AAC7C,iCAAoE;AAEpE,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,WAAW,CAAC,aAAa,EAAE,CAAC;QAC5B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,kFAAkF;QAClF,wFAAwF;QACxF,IAAI,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,mBAAmB,CAAC,KAAK,MAAM,EAAE,CAAC;YACvD,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAC5B,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;YACF,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;gBACzB,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;gBACjD,MAAM,OAAO,GAAG,MAAM,MAAM,CAAC,UAAU,EAAE,CAAC;gBAC1C,MAAM,cAAc,CAAC,4BAA4B,CAC/C,MAAM,EACN,MAAM,CAAC,aAAa,CAAC,IAAI,EACzB,OAAO,CAAC,OAAO,CAChB,CAAC;YACJ,CAAC;QACH,CAAC;QAED,+EAA+E;QAC/E,4EAA4E;QAC5E,+BAA+B;QAC/B,MAAM,qBAAqB,GAAG,IAAA,6CAAwB,GAAE,CAAC;QACzD,IAAI,EAAE,CAAC,UAAU,CAAC,qBAAqB,CAAC,EAAE,CAAC;YACzC,IAAI,CAAC;gBACH,EAAE,CAAC,MAAM,CAAC,qBAAqB,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;YACxD,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBACf,MAAM,CAAC,IAAI,CACT,2DAA2D,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CACpF,CAAC;YACJ,CAAC;QACH,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,oCAAoC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAC7D,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"analyze-action-post.js","sourceRoot":"","sources":["../src/analyze-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,qCAAqC;AACrC,iDAA2C;AAC3C,kEAAoD;AACpD,+CAAuC;AACvC,uCAAwD;AACxD,iCAAoE;AAEpE,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,WAAW,CAAC,aAAa,EAAE,CAAC;QAC5B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,kFAAkF;QAClF,wFAAwF;QACxF,IAAI,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,mBAAmB,CAAC,KAAK,MAAM,EAAE,CAAC;YACvD,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAC5B,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;YACF,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;gBACzB,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;gBACjD,MAAM,OAAO,GAAG,MAAM,MAAM,CAAC,UAAU,EAAE,CAAC;gBAC1C,MAAM,IAAA,mBAAS,EAAC,yCAAyC,EAAE,GAAG,EAAE,CAC9D,cAAc,CAAC,4BAA4B,CACzC,MAAM,EACN,MAAM,CAAC,aAAa,CAAC,IAAI,EACzB,OAAO,CAAC,OAAO,CAChB,CACF,CAAC;YACJ,CAAC;QACH,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,oCAAoC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAC7D,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

lib/analyze-action.js generated
View File

@@ -41,6 +41,7 @@ const fs = __importStar(require("fs"));
const path_1 = __importDefault(require("path"));
const perf_hooks_1 = require("perf_hooks");
const core = __importStar(require("@actions/core"));
const github = __importStar(require("@actions/github"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze");
const api_client_1 = require("./api-client");
@@ -50,7 +51,6 @@ const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const database_upload_1 = require("./database-upload");
const dependency_caching_1 = require("./dependency-caching");
const diff_informed_analysis_utils_1 = require("./diff-informed-analysis-utils");
const environment_1 = require("./environment");
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
@@ -160,14 +160,6 @@ async function run() {
let dbCreationTimings = undefined;
let didUploadTrapCaches = false;
util.initializeEnvironment(actionsUtil.getActionVersion());
// Unset the CODEQL_PROXY_* environment variables, as they are not needed
// and can cause issues with the CodeQL CLI
// Check for CODEQL_PROXY_HOST: and if it is empty but set, unset it
if (process.env.CODEQL_PROXY_HOST === "") {
delete process.env.CODEQL_PROXY_HOST;
delete process.env.CODEQL_PROXY_PORT;
delete process.env.CODEQL_PROXY_CA_CERTIFICATE;
}
// Make inputs accessible in the `post` step, details at
// https://github.com/github/codeql-action/issues/2553
actionsUtil.persistInputs();
@@ -189,24 +181,22 @@ async function run() {
const outputDir = actionsUtil.getRequiredInput("output");
core.exportVariable(environment_1.EnvVar.SARIF_RESULTS_OUTPUT_DIR, outputDir);
const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger);
const repositoryNwo = (0, repository_1.getRepositoryNwo)();
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
util.checkActionVersion(actionsUtil.getActionVersion(), gitHubVersion);
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, actionsUtil.getTemporaryDirectory(), logger);
const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"], logger);
const branches = await (0, diff_informed_analysis_utils_1.getDiffInformedAnalysisBranches)(codeql, features, logger);
const diffRangePackDir = branches
? await (0, analyze_1.setupDiffInformedQueryRun)(branches, logger)
: undefined;
const pull_request = github.context.payload.pull_request;
const diffRangePackDir = pull_request &&
(await (0, analyze_1.setupDiffInformedQueryRun)(pull_request.base.ref, pull_request.head.label, codeql, logger, features));
await (0, analyze_1.warnIfGoInstalledAfterInit)(config, logger);
await runAutobuildIfLegacyGoWorkflow(config, logger);
dbCreationTimings = await (0, analyze_1.runFinalize)(outputDir, threads, memory, codeql, config, logger);
const cleanupLevel = actionsUtil.getOptionalInput("cleanup-level") || "brutal";
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, cleanupLevel, diffRangePackDir, actionsUtil.getOptionalInput("category"), config, logger, features);
runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, diffRangePackDir, actionsUtil.getOptionalInput("category"), config, logger, features);
}
if (cleanupLevel !== "none") {
await (0, analyze_1.runCleanup)(config, cleanupLevel, logger);
if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
await (0, analyze_1.runCleanup)(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);
}
const dbLocations = {};
for (const language of config.languages) {
@@ -240,7 +230,7 @@ async function run() {
}
else if (uploadResult !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await uploadLib.waitForProcessing((0, repository_1.getRepositoryNwo)(), uploadResult.sarifID, (0, logging_1.getActionsLogger)());
await uploadLib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, (0, logging_1.getActionsLogger)());
}
// If we did not throw an error yet here, but we expect one, throw it.
if (actionsUtil.getOptionalInput("expect-error") === "true") {

File diff suppressed because one or more lines are too long

lib/analyze.js generated
View File

@@ -54,14 +54,11 @@ const actionsUtil = __importStar(require("./actions-util"));
const api_client_1 = require("./api-client");
const autobuild_1 = require("./autobuild");
const codeql_1 = require("./codeql");
const dependency_caching_1 = require("./dependency-caching");
const diagnostics_1 = require("./diagnostics");
const diff_informed_analysis_utils_1 = require("./diff-informed-analysis-utils");
const environment_1 = require("./environment");
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const tools_features_1 = require("./tools-features");
const tracer_config_1 = require("./tracer-config");
const upload_lib_1 = require("./upload-lib");
@@ -104,14 +101,6 @@ async function runExtraction(codeql, config, logger) {
config.buildMode === util_1.BuildMode.Autobuild) {
await (0, autobuild_1.setupCppAutobuild)(codeql, logger);
}
// The Java `build-mode: none` extractor places dependencies (.jar files) in the
// database scratch directory by default. For dependency caching purposes, we want
// a stable path that caches can be restored into and that we can cache at the
// end of the workflow (i.e. that does not get removed when the scratch directory is).
if (language === languages_1.Language.java && config.buildMode === util_1.BuildMode.None) {
process.env["CODEQL_EXTRACTOR_JAVA_OPTION_BUILDLESS_DEPENDENCY_DIR"] =
(0, dependency_caching_1.getJavaTempDependencyDir)();
}
await codeql.extractUsingBuildMode(config, language);
}
else {
@@ -162,13 +151,21 @@ async function finalizeDatabaseCreation(codeql, config, threadsFlag, memoryFlag,
/**
* Set up the diff-informed analysis feature.
*
* @param baseRef The base branch name, used for calculating the diff range.
* @param headLabel The label that uniquely identifies the head branch across
* repositories, used for calculating the diff range.
* @param codeql
* @param logger
* @param features
* @returns Absolute path to the directory containing the extension pack for
* the diff range information, or `undefined` if the feature is disabled.
*/
async function setupDiffInformedQueryRun(branches, logger) {
async function setupDiffInformedQueryRun(baseRef, headLabel, codeql, logger, features) {
if (!(await features.getValue(feature_flags_1.Feature.DiffInformedQueries, codeql))) {
return undefined;
}
return await (0, logging_1.withGroupAsync)("Generating diff range extension pack", async () => {
logger.info(`Calculating diff ranges for ${branches.base}...${branches.head}`);
const diffRanges = await getPullRequestEditedDiffRanges(branches, logger);
const diffRanges = await getPullRequestEditedDiffRanges(baseRef, headLabel, logger);
const packDir = writeDiffRangeDataExtensionPack(logger, diffRanges);
if (packDir === undefined) {
logger.warning("Cannot create diff range extension pack for diff-informed queries; " +
@@ -183,15 +180,17 @@ async function setupDiffInformedQueryRun(branches, logger) {
/**
* Return the file line ranges that were added or modified in the pull request.
*
* @param branches The base and head branches of the pull request.
* @param baseRef The base branch name, used for calculating the diff range.
* @param headLabel The label that uniquely identifies the head branch across
* repositories, used for calculating the diff range.
* @param logger
* @returns An array of tuples, where each tuple contains the absolute path of a
* file, the start line and the end line (both 1-based and inclusive) of an
* added or modified range in that file. Returns `undefined` if the action was
* not triggered by a pull request or if there was an error.
*/
async function getPullRequestEditedDiffRanges(branches, logger) {
const fileDiffs = await getFileDiffsWithBasehead(branches, logger);
async function getPullRequestEditedDiffRanges(baseRef, headLabel, logger) {
const fileDiffs = await getFileDiffsWithBasehead(baseRef, headLabel, logger);
if (fileDiffs === undefined) {
return undefined;
}
@@ -214,15 +213,15 @@ async function getPullRequestEditedDiffRanges(branches, logger) {
}
return results;
}
async function getFileDiffsWithBasehead(branches, logger) {
// Check CODE_SCANNING_REPOSITORY first. If it is empty or not set, fall back
// to GITHUB_REPOSITORY.
const repositoryNwo = (0, repository_1.getRepositoryNwoFromEnv)("CODE_SCANNING_REPOSITORY", "GITHUB_REPOSITORY");
const basehead = `${branches.base}...${branches.head}`;
async function getFileDiffsWithBasehead(baseRef, headLabel, logger) {
const ownerRepo = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
const owner = ownerRepo[0];
const repo = ownerRepo[1];
const basehead = `${baseRef}...${headLabel}`;
try {
const response = await (0, api_client_1.getApiClient)().rest.repos.compareCommitsWithBasehead({
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
owner,
repo,
basehead,
per_page: 1,
});
@@ -334,14 +333,6 @@ function writeDiffRangeDataExtensionPack(logger, ranges) {
if (ranges === undefined) {
return undefined;
}
if (ranges.length === 0) {
// An empty diff range means that there are no added or modified lines in
// the pull request. But the `restrictAlertsTo` extensible predicate
// interprets an empty data extension differently, as an indication that
// all alerts should be included. So we need to specifically set the diff
// range to a non-empty list that cannot match any alert location.
ranges = [{ path: "", startLine: 0, endLine: 0 }];
}
const diffRangeDir = path.join(actionsUtil.getTemporaryDirectory(), "pr-diff-range");
fs.mkdirSync(diffRangeDir);
fs.writeFileSync(path.join(diffRangeDir, "qlpack.yml"), `
@@ -358,7 +349,6 @@ extensions:
- addsTo:
pack: codeql/util
extensible: restrictAlertsTo
checkPresence: false
data:
`;
let data = ranges
@@ -378,27 +368,23 @@ extensions:
const extensionFilePath = path.join(diffRangeDir, "pr-diff-range.yml");
fs.writeFileSync(extensionFilePath, extensionContents);
logger.debug(`Wrote pr-diff-range extension pack to ${extensionFilePath}:\n${extensionContents}`);
// Write the diff ranges to a JSON file, for action-side alert filtering by the
// upload-lib module.
(0, diff_informed_analysis_utils_1.writeDiffRangesJsonFile)(logger, ranges);
return diffRangeDir;
}
// Runs queries and creates sarif files in the given folder
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, cleanupLevel, diffRangePackDir, automationDetailsId, config, logger, features) {
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, diffRangePackDir, automationDetailsId, config, logger, features) {
const statusReport = {};
const queryFlags = [memoryFlag, threadsFlag];
if (cleanupLevel !== "overlay") {
queryFlags.push("--expect-discarded-cache");
}
statusReport.analysis_is_diff_informed = diffRangePackDir !== undefined;
if (diffRangePackDir) {
queryFlags.push(`--additional-packs=${diffRangePackDir}`);
queryFlags.push("--extension-packs=codeql-action/pr-diff-range");
}
const dataExtensionFlags = diffRangePackDir
? [
`--additional-packs=${diffRangePackDir}`,
"--extension-packs=codeql-action/pr-diff-range",
]
: [];
const sarifRunPropertyFlag = diffRangePackDir
? "--sarif-run-property=incrementalMode=diff-informed"
: undefined;
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
const queryFlags = [memoryFlag, threadsFlag, ...dataExtensionFlags];
for (const language of config.languages) {
try {
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
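The diff-informed analysis changes above ultimately call GitHub's compare endpoint with a `base...head` range and read per-file patches from the response. Below is a rough standalone equivalent using `@octokit/rest` directly, with token and error handling simplified; the action itself goes through its own API client wrapper.

```typescript
// Sketch: fetch per-file diffs for a pull request's base...head range.
import { Octokit } from "@octokit/rest";

async function getFileDiffs(
  owner: string,
  repo: string,
  baseRef: string,
  headLabel: string,
  token: string,
) {
  const octokit = new Octokit({ auth: token });
  const basehead = `${baseRef}...${headLabel}`;

  const response = await octokit.rest.repos.compareCommitsWithBasehead({
    owner,
    repo,
    basehead,
  });

  // Each returned file entry carries a filename and, for text files, a unified
  // `patch` from which added/modified line ranges can be computed.
  return response.data.files ?? [];
}
```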

File diff suppressed because one or more lines are too long

lib/analyze.test.js generated
View File

@@ -114,7 +114,7 @@ const util = __importStar(require("./util"));
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
recursive: true,
});
const statusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, "brutal", undefined, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.QaTelemetryEnabled]));
const statusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.QaTelemetryEnabled]));
t.deepEqual(Object.keys(statusReport).sort(), [
"analysis_is_diff_informed",
`analyze_builtin_queries_${language}_duration_ms`,

File diff suppressed because one or more lines are too long

lib/api-client.js generated
View File

@@ -122,12 +122,14 @@ async function getGitHubVersion() {
* Get the path of the currently executing workflow relative to the repository root.
*/
async function getWorkflowRelativePath() {
const repo_nwo = (0, repository_1.getRepositoryNwo)();
const repo_nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY").split("/");
const owner = repo_nwo[0];
const repo = repo_nwo[1];
const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
const apiClient = getApiClient();
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
owner: repo_nwo.owner,
repo: repo_nwo.repo,
owner,
repo,
run_id,
});
const workflowUrl = runsResponse.data.workflow_url;
@@ -185,7 +187,7 @@ function computeAutomationID(analysis_key, environment) {
}
/** List all Actions cache entries matching the provided key and ref. */
async function listActionsCaches(key, ref) {
const repositoryNwo = (0, repository_1.getRepositoryNwo)();
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
return await getApiClient().paginate("GET /repos/{owner}/{repo}/actions/caches", {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
@@ -195,7 +197,7 @@ async function listActionsCaches(key, ref) {
}
/** Delete an Actions cache item by its ID. */
async function deleteActionsCache(id) {
const repositoryNwo = (0, repository_1.getRepositoryNwo)();
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
await getApiClient().rest.actions.deleteActionsCacheById({
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
@@ -204,16 +206,11 @@ async function deleteActionsCache(id) {
}
function wrapApiConfigurationError(e) {
if ((0, util_1.isHTTPError)(e)) {
if (e.message.includes("API rate limit exceeded for installation") ||
if (e.message.includes("API rate limit exceeded for site ID installation") ||
e.message.includes("commit not found") ||
e.message.includes("Resource not accessible by integration") ||
/ref .* not found in this repository/.test(e.message)) {
/^ref .* not found in this repository$/.test(e.message)) {
return new util_1.ConfigurationError(e.message);
}
else if (e.message.includes("Bad credentials") ||
e.message.includes("Not Found")) {
return new util_1.ConfigurationError("Please check that your token is valid and has the required permissions: contents: read, security-events: write");
}
}
return e;
}

File diff suppressed because one or more lines are too long

lib/api-client.test.js generated
View File

@@ -120,40 +120,4 @@ function mockGetMetaVersionHeader(versionHeader) {
});
t.deepEqual({ type: util.GitHubVariant.GHE_DOTCOM }, gheDotcom);
});
(0, ava_1.default)("wrapApiConfigurationError correctly wraps specific configuration errors", (t) => {
// We don't reclassify arbitrary errors
const arbitraryError = new Error("arbitrary error");
let res = api.wrapApiConfigurationError(arbitraryError);
t.is(res, arbitraryError);
// Same goes for arbitrary errors
const configError = new util.ConfigurationError("arbitrary error");
res = api.wrapApiConfigurationError(configError);
t.is(res, configError);
// If an HTTP error doesn't contain a specific error message, we don't
// wrap it as an API error.
const httpError = new util.HTTPError("arbitrary HTTP error", 456);
res = api.wrapApiConfigurationError(httpError);
t.is(res, httpError);
// For other HTTP errors, we wrap them as Configuration errors if they contain
// specific error messages.
const httpNotFoundError = new util.HTTPError("commit not found", 404);
res = api.wrapApiConfigurationError(httpNotFoundError);
t.deepEqual(res, new util.ConfigurationError("commit not found"));
const refNotFoundError = new util.HTTPError("ref 'refs/heads/jitsi' not found in this repository - https://docs.github.com/rest", 404);
res = api.wrapApiConfigurationError(refNotFoundError);
t.deepEqual(res, new util.ConfigurationError("ref 'refs/heads/jitsi' not found in this repository - https://docs.github.com/rest"));
const apiRateLimitError = new util.HTTPError("API rate limit exceeded for installation", 403);
res = api.wrapApiConfigurationError(apiRateLimitError);
t.deepEqual(res, new util.ConfigurationError("API rate limit exceeded for installation"));
const tokenSuggestionMessage = "Please check that your token is valid and has the required permissions: contents: read, security-events: write";
const badCredentialsError = new util.HTTPError("Bad credentials", 401);
res = api.wrapApiConfigurationError(badCredentialsError);
t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage));
const notFoundError = new util.HTTPError("Not Found", 404);
res = api.wrapApiConfigurationError(notFoundError);
t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage));
const resourceNotAccessibleError = new util.HTTPError("Resource not accessible by integration", 403);
res = api.wrapApiConfigurationError(resourceNotAccessibleError);
t.deepEqual(res, new util.ConfigurationError("Resource not accessible by integration"));
});
//# sourceMappingURL=api-client.test.js.map

File diff suppressed because one or more lines are too long

View File

@@ -1 +1 @@
{ "maximumVersion": "3.17", "minimumVersion": "3.12" }
{ "maximumVersion": "3.16", "minimumVersion": "3.12" }

lib/autobuild.js generated
View File

@@ -123,7 +123,7 @@ async function setupCppAutobuild(codeql, logger) {
const envVar = feature_flags_1.featureConfig[feature_flags_1.Feature.CppDependencyInstallation].envVar;
const featureName = "C++ automatic installation of dependencies";
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
const repositoryNwo = (0, repository_1.getRepositoryNwo)();
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
if (await features.getValue(feature_flags_1.Feature.CppDependencyInstallation, codeql)) {
// disable autoinstall on self-hosted runners unless explicitly requested

View File

@@ -1 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAeA,kEAkGC;AAED,8CAmCC;AAED,oCAsBC;AA9KD,oDAAsC;AAEtC,iDAA6E;AAC7E,6CAAgD;AAChD,qCAA6C;AAE7C,uCAAmC;AACnC,+CAAuC;AACvC,mDAAmE;AACnE,2CAAyD;AAEzD,6CAAgD;AAChD,qDAAgD;AAChD,iCAAmC;AAE5B,KAAK,UAAU,2BAA2B,CAC/C,MAAc,EACd,MAA0B,EAC1B,MAAc;IAEd,IACE,CAAC,MAAM,CAAC,SAAS,KAAK,gBAAS,CAAC,IAAI;QAClC,CAAC,MAAM,MAAM,CAAC,eAAe,CAAC,6BAAY,CAAC,wBAAwB,CAAC,CAAC,CAAC;QACxE,MAAM,CAAC,SAAS,KAAK,gBAAS,CAAC,MAAM,EACrC,CAAC;QACD,MAAM,CAAC,IAAI,CACT,qBAAqB,MAAM,CAAC,SAAS,2BAA2B;YAC9D,OAAO,gBAAM,CAAC,kBAAkB,wBAAwB,CAC3D,CAAC;QACF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CACvD,IAAA,4BAAgB,EAAC,CAAC,CAAC,CACpB,CAAC;IAEF,IAAI,CAAC,kBAAkB,EAAE,CAAC;QACxB,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;OA0BG;IACH,MAAM,2BAA2B,GAAG,kBAAkB,CAAC,MAAM,CAC3D,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,oBAAQ,CAAC,EAAE,CACzB,CAAC;IAEF,MAAM,SAAS,GAAe,EAAE,CAAC;IACjC,yEAAyE;IACzE,UAAU;IACV,IAAI,2BAA2B,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE,CAAC;QACjD,SAAS,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAC,CAAC,CAAC,CAAC;IACjD,CAAC;IACD,uEAAuE;IACvE,wCAAwC;IACxC,IAAI,kBAAkB,CAAC,MAAM,KAAK,2BAA2B,CAAC,MAAM,EAAE,CAAC;QACrE,SAAS,CAAC,IAAI,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC;IAC9B,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,kBAAkB,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAE3D,2EAA2E;IAC3E,4EAA4E;IAC5E,2CAA2C;IAC3C,uEAAuE;IACvE,2EAA2E;IAC3E,uEAAuE;IACvE,yCAAyC;IACzC,IAAI,2BAA2B,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC3C,MAAM,CAAC,OAAO,CACZ,oCAAoC,SAAS,CAAC,IAAI,CAChD,OAAO,CACR,8BAA8B,2BAA2B;aACvD,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CACH,OAAO,CACR,kFAAkF;YACnF,OAAO,gBAAM,CAAC,4BAA4B,wBAAwB,CACrE,CAAC;IACJ,CAAC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,MAAM,GAAG,6BAAa,CAAC,uBAAO,CAAC,yBAAyB,CAAC,CAAC,MAAM,CAAC;IACvE,MAAM,WAAW,GAAG,4CAA4C,CAAC;IACjE,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;IAC/C,MAAM,aAAa,GAAG,IAAA,6BAAgB,GAAE,CAAC;IACzC,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;IACF,IAAI,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,yBAAyB,EAAE,MAAM,CAAC,EAAE,CAAC;QACvE,yEAAyE;QACzE,IACE,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,KAAK,aAAa;YACnD,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,MAAM,EAC9B,CAAC;YACD,MAAM,CAAC,IAAI,CACT,aAAa,WAAW,sCACtB,IAAA,mCAAoB,GAAE,KAAK,SAAS;gBAClC,CAAC,CAAC,8BAA8B,MAAM,yDAAyD,gBAAM,CAAC,oBAAoB,wBAAwB;gBAClJ,CAAC,CAAC,EACN,EAAE,CACH,CAAC;YACF,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACvC,CAAC;aAAM,CAAC;YACN,MAAM,CAAC,IAAI,CACT,YAAY,WAAW,yCAAyC,MAAM,yCAAyC,gBAAM,CAAC,oBAAoB,wBAAwB,CACnK,CAAC;YACF,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtC,CAAC;IACH,CAAC;SAAM,CAAC;QACN,MAAM,CAAC,IAAI,CAAC,aAAa,WAAW,GAAG,CAAC,CAAC;QACzC,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAEM,KAAK,UAAU,YAAY,CAChC,MAA0B,EAC1B,QAAkB,EAClB,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IAAI,QAAQ,KAAK,oBAAQ,CAAC,GAAG,EAAE,CAAC;QAC9B,MAAM,iBAAiB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC1C,CAAC;IACD,IACE,MAAM,CAAC,SAAS;QAChB,CAAC,MAAM,MAAM,CAAC,eAAe,CAAC,6BAAY,CAAC,wBAAwB,CAAC,CAAC,EACrE,CAAC;QACD,MAAM,MAAM,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACvD,CAAC;SAAM,CAAC;QACN,MAAM,MAAM,CAAC,YAAY,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IAC9C,CAAC;IACD,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE,CAAC;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAM,CAAC,oBAAoB,EAAE,MAAM,CAA
C,CAAC;IAC3D,CAAC;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC"}
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAeA,kEAkGC;AAED,8CAqCC;AAED,oCAsBC;AAhLD,oDAAsC;AAEtC,iDAA6E;AAC7E,6CAAgD;AAChD,qCAA6C;AAE7C,uCAAmC;AACnC,+CAAuC;AACvC,mDAAmE;AACnE,2CAAyD;AAEzD,6CAAkD;AAClD,qDAAgD;AAChD,iCAAwD;AAEjD,KAAK,UAAU,2BAA2B,CAC/C,MAAc,EACd,MAA0B,EAC1B,MAAc;IAEd,IACE,CAAC,MAAM,CAAC,SAAS,KAAK,gBAAS,CAAC,IAAI;QAClC,CAAC,MAAM,MAAM,CAAC,eAAe,CAAC,6BAAY,CAAC,wBAAwB,CAAC,CAAC,CAAC;QACxE,MAAM,CAAC,SAAS,KAAK,gBAAS,CAAC,MAAM,EACrC,CAAC;QACD,MAAM,CAAC,IAAI,CACT,qBAAqB,MAAM,CAAC,SAAS,2BAA2B;YAC9D,OAAO,gBAAM,CAAC,kBAAkB,wBAAwB,CAC3D,CAAC;QACF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CACvD,IAAA,4BAAgB,EAAC,CAAC,CAAC,CACpB,CAAC;IAEF,IAAI,CAAC,kBAAkB,EAAE,CAAC;QACxB,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;OA0BG;IACH,MAAM,2BAA2B,GAAG,kBAAkB,CAAC,MAAM,CAC3D,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,oBAAQ,CAAC,EAAE,CACzB,CAAC;IAEF,MAAM,SAAS,GAAe,EAAE,CAAC;IACjC,yEAAyE;IACzE,UAAU;IACV,IAAI,2BAA2B,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE,CAAC;QACjD,SAAS,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAC,CAAC,CAAC,CAAC;IACjD,CAAC;IACD,uEAAuE;IACvE,wCAAwC;IACxC,IAAI,kBAAkB,CAAC,MAAM,KAAK,2BAA2B,CAAC,MAAM,EAAE,CAAC;QACrE,SAAS,CAAC,IAAI,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC;IAC9B,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,kBAAkB,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAE3D,2EAA2E;IAC3E,4EAA4E;IAC5E,2CAA2C;IAC3C,uEAAuE;IACvE,2EAA2E;IAC3E,uEAAuE;IACvE,yCAAyC;IACzC,IAAI,2BAA2B,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC3C,MAAM,CAAC,OAAO,CACZ,oCAAoC,SAAS,CAAC,IAAI,CAChD,OAAO,CACR,8BAA8B,2BAA2B;aACvD,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CACH,OAAO,CACR,kFAAkF;YACnF,OAAO,gBAAM,CAAC,4BAA4B,wBAAwB,CACrE,CAAC;IACJ,CAAC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,MAAM,GAAG,6BAAa,CAAC,uBAAO,CAAC,yBAAyB,CAAC,CAAC,MAAM,CAAC;IACvE,MAAM,WAAW,GAAG,4CAA4C,CAAC;IACjE,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;IAC/C,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;IACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;IACF,IAAI,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,yBAAyB,EAAE,MAAM,CAAC,EAAE,CAAC;QACvE,yEAAyE;QACzE,IACE,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,KAAK,aAAa;YACnD,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,MAAM,EAC9B,CAAC;YACD,MAAM,CAAC,IAAI,CACT,aAAa,WAAW,sCACtB,IAAA,mCAAoB,GAAE,KAAK,SAAS;gBAClC,CAAC,CAAC,8BAA8B,MAAM,yDAAyD,gBAAM,CAAC,oBAAoB,wBAAwB;gBAClJ,CAAC,CAAC,EACN,EAAE,CACH,CAAC;YACF,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACvC,CAAC;aAAM,CAAC;YACN,MAAM,CAAC,IAAI,CACT,YAAY,WAAW,yCAAyC,MAAM,yCAAyC,gBAAM,CAAC,oBAAoB,wBAAwB,CACnK,CAAC;YACF,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACtC,CAAC;IACH,CAAC;SAAM,CAAC;QACN,MAAM,CAAC,IAAI,CAAC,aAAa,WAAW,GAAG,CAAC,CAAC;QACzC,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAEM,KAAK,UAAU,YAAY,CAChC,MAA0B,EAC1B,QAAkB,EAClB,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IAAI,QAAQ,KAAK,oBAAQ,CAAC,GAAG,EAAE,CAAC;QAC9B,MAAM,iBAAiB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC1C,CAAC;IACD,IACE,MAAM,CAAC,SAAS;QAChB,CAAC,MAAM,MAAM,CAAC,eAAe,CAAC,6BAAY,CAAC,wBAAwB,CAAC,CAAC,EACrE,CAAC;QACD,MAAM,MAAM,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACvD,CAAC;SAAM,CAAC;QACN,MAAM,MAAM,CAAC,YAAY,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IAC9C,CAAC;IACD,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE,CAAC;QAC7B,IAAI,CAAC,cAAc,C
AAC,oBAAM,CAAC,oBAAoB,EAAE,MAAM,CAAC,CAAC;IAC3D,CAAC;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC"}

lib/cli-errors.js generated
View File

@@ -110,7 +110,6 @@ function extractAutobuildErrors(error) {
var CliConfigErrorCategory;
(function (CliConfigErrorCategory) {
CliConfigErrorCategory["AutobuildError"] = "AutobuildError";
CliConfigErrorCategory["CouldNotCreateTempDir"] = "CouldNotCreateTempDir";
CliConfigErrorCategory["ExternalRepositoryCloneFailed"] = "ExternalRepositoryCloneFailed";
CliConfigErrorCategory["GradleBuildFailed"] = "GradleBuildFailed";
CliConfigErrorCategory["IncompatibleWithActionVersion"] = "IncompatibleWithActionVersion";
@@ -140,9 +139,6 @@ exports.cliErrorsConfig = {
new RegExp("We were unable to automatically build your code"),
],
},
[CliConfigErrorCategory.CouldNotCreateTempDir]: {
cliErrorMessageCandidates: [new RegExp("Could not create temp directory")],
},
[CliConfigErrorCategory.ExternalRepositoryCloneFailed]: {
cliErrorMessageCandidates: [
new RegExp("Failed to clone external Git repository"),

File diff suppressed because one or more lines are too long

lib/codeql.js generated
View File

@@ -55,7 +55,6 @@ const environment_1 = require("./environment");
const feature_flags_1 = require("./feature-flags");
const git_utils_1 = require("./git-utils");
const languages_1 = require("./languages");
const overlay_database_utils_1 = require("./overlay-database-utils");
const setupCodeql = __importStar(require("./setup-codeql"));
const tools_features_1 = require("./tools-features");
const tracer_config_1 = require("./tracer-config");
@@ -134,11 +133,7 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
};
}
catch (e) {
const ErrorClass = e instanceof util.ConfigurationError ||
(e instanceof Error && e.message.includes("ENOSPC")) // out of disk space
? util.ConfigurationError
: Error;
throw new ErrorClass(`Unable to download and extract CodeQL CLI: ${(0, util_1.getErrorMessage)(e)}${e instanceof Error && e.stack ? `\n\nDetails: ${e.stack}` : ""}`);
throw new Error(`Unable to download and extract CodeQL CLI: ${(0, util_1.getErrorMessage)(e)}${e instanceof Error && e.stack ? `\n\nDetails: ${e.stack}` : ""}`);
}
}
/**
@@ -255,7 +250,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
async supportsFeature(feature) {
return (0, tools_features_1.isSupportedToolsFeature)(await this.getVersion(), feature);
},
async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, overlayDatabaseMode, logger) {
async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) {
const extraArgs = config.languages.map((language) => `--language=${language}`);
if (await (0, tracer_config_1.shouldEnableIndirectTracing)(codeql, config)) {
extraArgs.push("--begin-tracing");
@@ -263,17 +258,9 @@ async function getCodeQLForCmd(cmd, checkVersion) {
extraArgs.push(`--trace-process-name=${processName}`);
}
if (config.languages.indexOf(languages_1.Language.actions) >= 0) {
// We originally added an embedded version of the Actions extractor to the CodeQL Action
// itself in order to deploy the extractor between CodeQL releases. When we did add the
// extractor to the CLI, though, its autobuild script was missing the execute bit.
// 2.20.6 is the first CLI release with the fully-functional extractor in the CLI. For older
// versions, we'll keep using the embedded extractor. We can remove the embedded extractor
// once 2.20.6 is deployed in the runner images.
if (!(await util.codeQlVersionAtLeast(codeql, "2.20.6"))) {
extraArgs.push("--search-path");
const extractorPath = path.resolve(__dirname, "../actions-extractor");
extraArgs.push(extractorPath);
}
extraArgs.push("--search-path");
const extractorPath = path.resolve(__dirname, "../actions-extractor");
extraArgs.push(extractorPath);
}
const codeScanningConfigFile = await generateCodeScanningConfig(config, logger);
const externalRepositoryToken = (0, actions_util_1.getOptionalInput)("external-repository-token");
@@ -291,19 +278,10 @@ async function getCodeQLForCmd(cmd, checkVersion) {
const overwriteFlag = (0, tools_features_1.isSupportedToolsFeature)(await this.getVersion(), tools_features_1.ToolsFeature.ForceOverwrite)
? "--force-overwrite"
: "--overwrite";
if (overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.Overlay) {
const overlayChangesFile = await (0, overlay_database_utils_1.writeOverlayChangesFile)(config, sourceRoot, logger);
extraArgs.push(`--overlay-changes=${overlayChangesFile}`);
}
else if (overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.OverlayBase) {
extraArgs.push("--overlay-base");
}
await runCli(cmd, [
"database",
"init",
...(overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.Overlay
? []
: [overwriteFlag]),
overwriteFlag,
"--db-cluster",
config.dbLocation,
`--source-root=${sourceRoot}`,
@@ -315,9 +293,6 @@ async function getCodeQLForCmd(cmd, checkVersion) {
ignoringOptions: ["--overwrite"],
}),
], { stdin: externalRepositoryToken });
if (overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.OverlayBase) {
await (0, overlay_database_utils_1.writeBaseDatabaseOidsFile)(config, sourceRoot);
}
},
async runAutobuild(config, language) {
applyAutobuildAzurePipelinesTimeoutFix();
@@ -471,6 +446,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
"run-queries",
...flags,
databasePath,
"--expect-discarded-cache",
"--intra-layer-parallelism",
"--min-disk-free=1024", // Try to leave at least 1GB free
"-v",
@@ -809,13 +785,6 @@ async function generateCodeScanningConfig(config, logger) {
if (Array.isArray(augmentedConfig.packs) && !augmentedConfig.packs.length) {
delete augmentedConfig.packs;
}
augmentedConfig["query-filters"] = [
...(config.augmentationProperties.defaultQueryFilters || []),
...(augmentedConfig["query-filters"] || []),
];
if (augmentedConfig["query-filters"]?.length === 0) {
delete augmentedConfig["query-filters"];
}
logger.info(`Writing augmented user configuration file to ${codeScanningConfigFile}`);
logger.startGroup("Augmented user configuration file contents");
logger.info(yaml.dump(augmentedConfig));
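The hunk above merges action-computed default query filters into the user's code scanning configuration before serialising it with `yaml.dump`. A hedged sketch of that merge step, with a deliberately simplified config shape:

```typescript
// Sketch: merge default query filters into a user config before writing it out.
import * as yaml from "js-yaml";

interface UserConfig {
  "query-filters"?: Array<Record<string, unknown>>;
  [key: string]: unknown;
}

function augmentConfig(
  userConfig: UserConfig,
  defaultQueryFilters: Array<Record<string, unknown>>,
): UserConfig {
  const augmented: UserConfig = { ...userConfig };
  const filters = [
    ...defaultQueryFilters,
    ...(userConfig["query-filters"] ?? []),
  ];
  if (filters.length > 0) {
    augmented["query-filters"] = filters;
  } else {
    delete augmented["query-filters"];
  }
  return augmented;
}

// Example: exclude queries tagged for incremental analysis, as in the hunk above.
const augmented = augmentConfig({}, [
  { exclude: { tags: "exclude-from-incremental" } },
]);
console.log(yaml.dump(augmented));
```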

File diff suppressed because one or more lines are too long

lib/codeql.test.js generated
View File

@@ -53,7 +53,6 @@ const defaults = __importStar(require("./defaults.json"));
const doc_url_1 = require("./doc-url");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const overlay_database_utils_1 = require("./overlay-database-utils");
const setup_codeql_1 = require("./setup-codeql");
const testing_utils_1 = require("./testing-utils");
const tools_features_1 = require("./tools-features");
@@ -336,7 +335,7 @@ const injectedConfigMacro = ava_1.default.macro({
tempDir,
augmentationProperties,
};
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, undefined, overlay_database_utils_1.OverlayDatabaseMode.None, (0, logging_1.getRunnerLogger)(true));
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, undefined, (0, logging_1.getRunnerLogger)(true));
const args = runnerConstructorStub.firstCall.args[1];
// should have used a config file
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
@@ -472,7 +471,7 @@ const injectedConfigMacro = ava_1.default.macro({
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("2.17.6"));
await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, "/path/to/qlconfig.yml", overlay_database_utils_1.OverlayDatabaseMode.None, (0, logging_1.getRunnerLogger)(true));
await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, "/path/to/qlconfig.yml", (0, logging_1.getRunnerLogger)(true));
const args = runnerConstructorStub.firstCall.args[1];
// should have used a config file
const hasCodeScanningConfigArg = args.some((arg) => arg.startsWith("--codescanning-config="));
@@ -488,7 +487,7 @@ const injectedConfigMacro = ava_1.default.macro({
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves((0, testing_utils_1.makeVersionInfo)("2.17.6"));
await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, undefined, // undefined qlconfigFile
overlay_database_utils_1.OverlayDatabaseMode.None, (0, logging_1.getRunnerLogger)(true));
(0, logging_1.getRunnerLogger)(true));
const args = runnerConstructorStub.firstCall.args[1];
const hasQlconfigArg = args.some((arg) => arg.startsWith("--qlconfig-file="));
t.false(hasQlconfigArg, "should NOT have injected a qlconfig");
@@ -613,7 +612,7 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
sinon.stub(io, "which").resolves("");
await t.throwsAsync(async () => await codeqlObject.databaseRunQueries(stubConfig.dbLocation, []), {
instanceOf: cli_errors_1.CliError,
message: `Encountered a fatal error while running "codeql-for-testing database run-queries --intra-layer-parallelism --min-disk-free=1024 -v". Exit code was 1 and error was: Oops! A fatal internal error occurred. Details:
message: `Encountered a fatal error while running "codeql-for-testing database run-queries --expect-discarded-cache --intra-layer-parallelism --min-disk-free=1024 -v". Exit code was 1 and error was: Oops! A fatal internal error occurred. Details:
com.semmle.util.exception.CatastrophicError: An error occurred while evaluating ControlFlowGraph::ControlFlow::Root.isRootOf/1#dispred#f610e6ed/2@86282cc8
Severe disk cache trouble (corruption or out of space) at /home/runner/work/_temp/codeql_databases/go/db-go/default/cache/pages/28/33.pack: Failed to write item to disk. See the logs for more details.`,
});
@@ -639,7 +638,7 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
sinon.stub(io, "which").resolves("");
process.env["CODEQL_ACTION_EXTRA_OPTIONS"] =
'{ "database": { "init": ["--overwrite"] } }';
await codeqlObject.databaseInitCluster(stubConfig, "sourceRoot", undefined, undefined, overlay_database_utils_1.OverlayDatabaseMode.None, (0, logging_1.getRunnerLogger)(false));
await codeqlObject.databaseInitCluster(stubConfig, "sourceRoot", undefined, undefined, (0, logging_1.getRunnerLogger)(false));
t.true(runnerConstructorStub.calledOnce);
const args = runnerConstructorStub.firstCall.args[1];
t.is(args.filter((option) => option === "--overwrite").length, 1, "--overwrite should only be passed once");

File diff suppressed because one or more lines are too long

lib/config-utils.js generated
View File

@@ -64,7 +64,6 @@ const yaml = __importStar(require("js-yaml"));
const semver = __importStar(require("semver"));
const api = __importStar(require("./api-client"));
const caching_utils_1 = require("./caching-utils");
const diff_informed_analysis_utils_1 = require("./diff-informed-analysis-utils");
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const trap_caching_1 = require("./trap-caching");
@@ -80,7 +79,6 @@ exports.defaultAugmentationProperties = {
packsInputCombines: false,
packsInput: undefined,
queriesInput: undefined,
defaultQueryFilters: [],
};
function getPacksStrInvalid(packStr, configFile) {
return configFile
@@ -229,7 +227,7 @@ async function getRawLanguages(languagesInput, repository, logger) {
async function getDefaultConfig({ languagesInput, queriesInput, packsInput, buildModeInput, dbLocation, trapCachingEnabled, dependencyCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeql, githubVersion, features, logger, }) {
const languages = await getLanguages(codeql, languagesInput, repository, logger);
const buildMode = await parseBuildModeInput(buildModeInput, languages, features, logger);
const augmentationProperties = await calculateAugmentation(codeql, features, packsInput, queriesInput, languages, logger);
const augmentationProperties = calculateAugmentation(packsInput, queriesInput, languages);
const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeql, languages, logger);
return {
languages,
@@ -279,7 +277,7 @@ async function loadConfig({ languagesInput, queriesInput, packsInput, buildModeI
}
const languages = await getLanguages(codeql, languagesInput, repository, logger);
const buildMode = await parseBuildModeInput(buildModeInput, languages, features, logger);
const augmentationProperties = await calculateAugmentation(codeql, features, packsInput, queriesInput, languages, logger);
const augmentationProperties = calculateAugmentation(packsInput, queriesInput, languages);
const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeql, languages, logger);
return {
languages,
@@ -305,14 +303,11 @@ async function loadConfig({ languagesInput, queriesInput, packsInput, buildModeI
* and the CLI does not know about these inputs so we need to inject them into
* the config file sent to the CLI.
*
* @param codeql The CodeQL object.
* @param features The feature enablement object.
* @param rawPacksInput The packs input from the action configuration.
* @param rawQueriesInput The queries input from the action configuration.
* @param languages The languages that the config file is for. If the packs input
* is non-empty, then there must be exactly one language. Otherwise, an
* error is thrown.
* @param logger The logger to use for logging.
*
* @returns The properties that need to be augmented in the config file.
*
@@ -320,21 +315,16 @@ async function loadConfig({ languagesInput, queriesInput, packsInput, buildModeI
* not have exactly one language.
*/
// exported for testing.
async function calculateAugmentation(codeql, features, rawPacksInput, rawQueriesInput, languages, logger) {
function calculateAugmentation(rawPacksInput, rawQueriesInput, languages) {
const packsInputCombines = shouldCombine(rawPacksInput);
const packsInput = parsePacksFromInput(rawPacksInput, languages, packsInputCombines);
const queriesInputCombines = shouldCombine(rawQueriesInput);
const queriesInput = parseQueriesFromInput(rawQueriesInput, queriesInputCombines);
const defaultQueryFilters = [];
if (await (0, diff_informed_analysis_utils_1.shouldPerformDiffInformedAnalysis)(codeql, features, logger)) {
defaultQueryFilters.push({ exclude: { tags: "exclude-from-incremental" } });
}
return {
packsInputCombines,
packsInput: packsInput?.[languages[0]],
queriesInput,
queriesInputCombines,
defaultQueryFilters,
};
}
function parseQueriesFromInput(rawQueriesInput, queriesInputCombines) {

File diff suppressed because one or more lines are too long

View File

@@ -624,7 +624,7 @@ const packSpecPrettyPrintingMacro = ava_1.default.macro({
const mockLogger = (0, logging_1.getRunnerLogger)(true);
const calculateAugmentationMacro = ava_1.default.macro({
exec: async (t, _title, rawPacksInput, rawQueriesInput, languages, expectedAugmentationProperties) => {
const actualAugmentationProperties = await configUtils.calculateAugmentation((0, codeql_1.getCachedCodeQL)(), (0, testing_utils_1.createFeatures)([]), rawPacksInput, rawQueriesInput, languages, mockLogger);
const actualAugmentationProperties = configUtils.calculateAugmentation(rawPacksInput, rawQueriesInput, languages);
t.deepEqual(actualAugmentationProperties, expectedAugmentationProperties);
},
title: (_, title) => `Calculate Augmentation: ${title}`,
@@ -634,39 +634,34 @@ const calculateAugmentationMacro = ava_1.default.macro({
queriesInput: undefined,
packsInputCombines: false,
packsInput: undefined,
defaultQueryFilters: [],
});
(0, ava_1.default)(calculateAugmentationMacro, "With queries", undefined, " a, b , c, d", [languages_1.Language.javascript], {
queriesInputCombines: false,
queriesInput: [{ uses: "a" }, { uses: "b" }, { uses: "c" }, { uses: "d" }],
packsInputCombines: false,
packsInput: undefined,
defaultQueryFilters: [],
});
(0, ava_1.default)(calculateAugmentationMacro, "With queries combining", undefined, " + a, b , c, d ", [languages_1.Language.javascript], {
queriesInputCombines: true,
queriesInput: [{ uses: "a" }, { uses: "b" }, { uses: "c" }, { uses: "d" }],
packsInputCombines: false,
packsInput: undefined,
defaultQueryFilters: [],
});
(0, ava_1.default)(calculateAugmentationMacro, "With packs", " codeql/a , codeql/b , codeql/c , codeql/d ", undefined, [languages_1.Language.javascript], {
queriesInputCombines: false,
queriesInput: undefined,
packsInputCombines: false,
packsInput: ["codeql/a", "codeql/b", "codeql/c", "codeql/d"],
defaultQueryFilters: [],
});
(0, ava_1.default)(calculateAugmentationMacro, "With packs combining", " + codeql/a, codeql/b, codeql/c, codeql/d", undefined, [languages_1.Language.javascript], {
queriesInputCombines: false,
queriesInput: undefined,
packsInputCombines: true,
packsInput: ["codeql/a", "codeql/b", "codeql/c", "codeql/d"],
defaultQueryFilters: [],
});
const calculateAugmentationErrorMacro = ava_1.default.macro({
exec: async (t, _title, rawPacksInput, rawQueriesInput, languages, expectedError) => {
await t.throwsAsync(() => configUtils.calculateAugmentation((0, codeql_1.getCachedCodeQL)(), (0, testing_utils_1.createFeatures)([]), rawPacksInput, rawQueriesInput, languages, mockLogger), { message: expectedError });
t.throws(() => configUtils.calculateAugmentation(rawPacksInput, rawQueriesInput, languages), { message: expectedError });
},
title: (_, title) => `Calculate Augmentation Error: ${title}`,
});

File diff suppressed because one or more lines are too long
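The tests above pin down the simplified calculateAugmentation signature: it is now synchronous, takes only the raw packs input, raw queries input, and languages, and no longer emits defaultQueryFilters. As an orientation aid (not part of this diff), here is a TypeScript-style sketch of calling the helper with the same kinds of inputs the tests use; a leading "+" marks an input that is combined with, rather than replaces, the packs or queries from the configuration file. The module paths and the Language enum are the ones shown in this comparison.

// Illustrative sketch only; mirrors the test fixtures above.
import * as configUtils from "./config-utils";
import { Language } from "./languages";

const augmentation = configUtils.calculateAugmentation(
  " + codeql/a, codeql/b",   // packs input; "+" means combine with the config file
  " a, b",                   // queries input; no "+" means replace
  [Language.javascript],
);
// augmentation.packsInputCombines === true
// augmentation.packsInput deep-equals ["codeql/a", "codeql/b"]
// augmentation.queriesInputCombines === false
// augmentation.queriesInput deep-equals [{ uses: "a" }, { uses: "b" }]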

lib/debug-artifacts.js generated
View File

@@ -46,7 +46,7 @@ const path = __importStar(require("path"));
const artifact = __importStar(require("@actions/artifact"));
const artifactLegacy = __importStar(require("@actions/artifact-legacy"));
const core = __importStar(require("@actions/core"));
const archiver_1 = __importDefault(require("archiver"));
const adm_zip_1 = __importDefault(require("adm-zip"));
const del_1 = __importDefault(require("del"));
const actions_util_1 = require("./actions-util");
const analyze_1 = require("./analyze");
@@ -66,28 +66,26 @@ async function uploadCombinedSarifArtifacts(logger, gitHubVariant, codeQlVersion
const tempDir = (0, actions_util_1.getTemporaryDirectory)();
// Upload Actions SARIF artifacts for debugging when environment variable is set
if (process.env["CODEQL_ACTION_DEBUG_COMBINED_SARIF"] === "true") {
await (0, logging_1.withGroup)("Uploading combined SARIF debug artifact", async () => {
logger.info("Uploading available combined SARIF files as Actions debugging artifact...");
const baseTempDir = path.resolve(tempDir, "combined-sarif");
const toUpload = [];
if (fs.existsSync(baseTempDir)) {
const outputDirs = fs.readdirSync(baseTempDir);
for (const outputDir of outputDirs) {
const sarifFiles = fs
.readdirSync(path.resolve(baseTempDir, outputDir))
.filter((f) => f.endsWith(".sarif"));
for (const sarifFile of sarifFiles) {
toUpload.push(path.resolve(baseTempDir, outputDir, sarifFile));
}
logger.info("Uploading available combined SARIF files as Actions debugging artifact...");
const baseTempDir = path.resolve(tempDir, "combined-sarif");
const toUpload = [];
if (fs.existsSync(baseTempDir)) {
const outputDirs = fs.readdirSync(baseTempDir);
for (const outputDir of outputDirs) {
const sarifFiles = fs
.readdirSync(path.resolve(baseTempDir, outputDir))
.filter((f) => f.endsWith(".sarif"));
for (const sarifFile of sarifFiles) {
toUpload.push(path.resolve(baseTempDir, outputDir, sarifFile));
}
}
try {
await uploadDebugArtifacts(logger, toUpload, baseTempDir, "combined-sarif-artifacts", gitHubVariant, codeQlVersion);
}
catch (e) {
logger.warning(`Failed to upload combined SARIF files as Actions debugging artifact. Reason: ${(0, util_1.getErrorMessage)(e)}`);
}
});
}
try {
await uploadDebugArtifacts(logger, toUpload, baseTempDir, "combined-sarif-artifacts", gitHubVariant, codeQlVersion);
}
catch (e) {
logger.warning(`Failed to upload combined SARIF files as Actions debugging artifact. Reason: ${(0, util_1.getErrorMessage)(e)}`);
}
}
}
/**
@@ -250,20 +248,9 @@ async function createPartialDatabaseBundle(config, language) {
if (fs.existsSync(databaseBundlePath)) {
await (0, del_1.default)(databaseBundlePath, { force: true });
}
const output = fs.createWriteStream(databaseBundlePath);
const zip = (0, archiver_1.default)("zip");
zip.on("error", (err) => {
throw err;
});
zip.on("warning", (err) => {
// Ignore ENOENT warnings. There's nothing anyone can do about it.
if (err.code !== "ENOENT") {
throw err;
}
});
zip.pipe(output);
zip.directory(databasePath, false);
await zip.finalize();
const zip = new adm_zip_1.default();
zip.addLocalFolder(databasePath);
zip.writeZip(databaseBundlePath);
return databaseBundlePath;
}
/**

File diff suppressed because one or more lines are too long

View File

@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-v2.21.1",
"cliVersion": "2.21.1",
"priorBundleVersion": "codeql-bundle-v2.21.0",
"priorCliVersion": "2.21.0"
"bundleVersion": "codeql-bundle-v2.20.3",
"cliVersion": "2.20.3",
"priorBundleVersion": "codeql-bundle-v2.20.2",
"priorCliVersion": "2.20.2"
}

View File

@@ -33,27 +33,17 @@ var __importStar = (this && this.__importStar) || (function () {
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.getJavaTempDependencyDir = getJavaTempDependencyDir;
exports.downloadDependencyCaches = downloadDependencyCaches;
exports.uploadDependencyCaches = uploadDependencyCaches;
const os = __importStar(require("os"));
const path_1 = require("path");
const actionsCache = __importStar(require("@actions/cache"));
const glob = __importStar(require("@actions/glob"));
const actions_util_1 = require("./actions-util");
const caching_utils_1 = require("./caching-utils");
const environment_1 = require("./environment");
const util_1 = require("./util");
const CODEQL_DEPENDENCY_CACHE_PREFIX = "codeql-dependencies";
const CODEQL_DEPENDENCY_CACHE_VERSION = 1;
/**
* Returns a path to a directory intended to be used to store .jar files
* for the Java `build-mode: none` extractor.
* @returns The path to the directory that should be used by the `build-mode: none` extractor.
*/
function getJavaTempDependencyDir() {
return (0, path_1.join)((0, actions_util_1.getTemporaryDirectory)(), "codeql_java", "repository");
}
/**
* Default caching configurations per language.
*/
@@ -64,8 +54,6 @@ const CODEQL_DEFAULT_CACHE_CONFIG = {
(0, path_1.join)(os.homedir(), ".m2", "repository"),
// Gradle
(0, path_1.join)(os.homedir(), ".gradle", "caches"),
// CodeQL Java build-mode: none
getJavaTempDependencyDir(),
],
hash: [
// Maven

View File

File diff suppressed because one or more lines are too long (lib/dependency-caching.js.map)
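The dependency-caching change above removes the Java temporary dependency directory from CODEQL_DEFAULT_CACHE_CONFIG, leaving per-language lists of directories to cache and glob patterns to hash. Below is a minimal sketch of how such a configuration is typically turned into a cache restore, using the @actions/cache and @actions/glob packages this module already imports; the key layout, the restore-key fallback, and the glob patterns are assumptions for illustration, not the exact values used by lib/dependency-caching.js.

// Sketch only: assumed key layout, not the one computed by downloadDependencyCaches.
import * as actionsCache from "@actions/cache";
import * as glob from "@actions/glob";
import * as os from "os";
import { join } from "path";

async function restoreJavaDependencyCache(): Promise<string | undefined> {
  const paths = [
    join(os.homedir(), ".m2", "repository"), // Maven
    join(os.homedir(), ".gradle", "caches"), // Gradle
  ];
  // Hash the files that pin dependency versions so the key changes with them.
  const hash = await glob.hashFiles(["**/pom.xml", "**/*.gradle*"].join("\n"));
  const primaryKey = `codeql-dependencies-1-java-${hash}`; // assumed layout
  return actionsCache.restoreCache(paths, primaryKey, ["codeql-dependencies-1-java-"]);
}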

View File

@@ -1,114 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.shouldPerformDiffInformedAnalysis = shouldPerformDiffInformedAnalysis;
exports.getDiffInformedAnalysisBranches = getDiffInformedAnalysisBranches;
exports.writeDiffRangesJsonFile = writeDiffRangesJsonFile;
exports.readDiffRangesJsonFile = readDiffRangesJsonFile;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const github = __importStar(require("@actions/github"));
const actionsUtil = __importStar(require("./actions-util"));
const feature_flags_1 = require("./feature-flags");
function getPullRequestBranches() {
const pullRequest = github.context.payload.pull_request;
if (pullRequest) {
return {
base: pullRequest.base.ref,
// We use the head label instead of the head ref here, because the head
// ref lacks owner information and by itself does not uniquely identify
// the head branch (which may be in a forked repository).
head: pullRequest.head.label,
};
}
// PR analysis under Default Setup does not have the pull_request context,
// but it should set CODE_SCANNING_REF and CODE_SCANNING_BASE_BRANCH.
const codeScanningRef = process.env.CODE_SCANNING_REF;
const codeScanningBaseBranch = process.env.CODE_SCANNING_BASE_BRANCH;
if (codeScanningRef && codeScanningBaseBranch) {
return {
base: codeScanningBaseBranch,
// PR analysis under Default Setup analyzes the PR head commit instead of
// the merge commit, so we can use the provided ref directly.
head: codeScanningRef,
};
}
return undefined;
}
/**
* Check if the action should perform diff-informed analysis.
*/
async function shouldPerformDiffInformedAnalysis(codeql, features, logger) {
return ((await getDiffInformedAnalysisBranches(codeql, features, logger)) !==
undefined);
}
/**
* Get the branches to use for diff-informed analysis.
*
* @returns If the action should perform diff-informed analysis, return
* the base and head branches that should be used to compute the diff ranges.
* Otherwise return `undefined`.
*/
async function getDiffInformedAnalysisBranches(codeql, features, logger) {
if (!(await features.getValue(feature_flags_1.Feature.DiffInformedQueries, codeql))) {
return undefined;
}
const branches = getPullRequestBranches();
if (!branches) {
logger.info("Not performing diff-informed analysis " +
"because we are not analyzing a pull request.");
}
return branches;
}
function getDiffRangesJsonFilePath() {
return path.join(actionsUtil.getTemporaryDirectory(), "pr-diff-range.json");
}
function writeDiffRangesJsonFile(logger, ranges) {
const jsonContents = JSON.stringify(ranges, null, 2);
const jsonFilePath = getDiffRangesJsonFilePath();
fs.writeFileSync(jsonFilePath, jsonContents);
logger.debug(`Wrote pr-diff-range JSON file to ${jsonFilePath}:\n${jsonContents}`);
}
function readDiffRangesJsonFile(logger) {
const jsonFilePath = getDiffRangesJsonFilePath();
if (!fs.existsSync(jsonFilePath)) {
logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`);
return undefined;
}
const jsonContents = fs.readFileSync(jsonFilePath, "utf8");
logger.debug(`Read pr-diff-range JSON file from ${jsonFilePath}:\n${jsonContents}`);
return JSON.parse(jsonContents);
}
//# sourceMappingURL=diff-informed-analysis-utils.js.map
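The module removed above tied diff-informed analysis together: getDiffInformedAnalysisBranches decides whether a PR (or Default Setup) analysis should be diff-informed, and the write/read helpers persist the computed diff ranges in the temporary directory. The following is only a sketch of how those pieces composed, for readers tracing the revert; the shape of a range object (path, startLine, endLine) is an assumption here, and the codeql/features arguments are left untyped for brevity.

// Sketch of the removed module's intended flow; range shape is assumed.
import { getRunnerLogger } from "./logging";
import {
  getDiffInformedAnalysisBranches,
  writeDiffRangesJsonFile,
  readDiffRangesJsonFile,
} from "./diff-informed-analysis-utils";

async function demoDiffInformedFlow(codeql: any, features: any): Promise<void> {
  const logger = getRunnerLogger(true);
  const branches = await getDiffInformedAnalysisBranches(codeql, features, logger);
  if (branches === undefined) {
    return; // not a pull request, or the feature flag is disabled
  }
  // Ranges would normally be computed from the git diff between the two branches.
  writeDiffRangesJsonFile(logger, [
    { path: "src/app.ts", startLine: 10, endLine: 42 }, // assumed shape
  ]);
  logger.info(JSON.stringify(readDiffRangesJsonFile(logger)));
}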

View File

File diff suppressed because one or more lines are too long (lib/diff-informed-analysis-utils.js.map, deleted)

lib/feature-flags.js generated
View File

@@ -68,7 +68,6 @@ var Feature;
Feature["ExtractToToolcache"] = "extract_to_toolcache";
Feature["PythonDefaultIsToNotExtractStdlib"] = "python_default_is_to_not_extract_stdlib";
Feature["QaTelemetryEnabled"] = "qa_telemetry_enabled";
Feature["RustAnalysis"] = "rust_analysis";
Feature["ZstdBundleStreamingExtraction"] = "zstd_bundle_streaming_extraction";
})(Feature || (exports.Feature = Feature = {}));
exports.featureConfig = {
@@ -96,7 +95,8 @@ exports.featureConfig = {
[Feature.DiffInformedQueries]: {
defaultValue: false,
envVar: "CODEQL_ACTION_DIFF_INFORMED_QUERIES",
minimumVersion: "2.21.0",
minimumVersion: undefined,
toolsFeature: tools_features_1.ToolsFeature.DatabaseInterpretResultsSupportsSarifRunProperty,
},
[Feature.DisableCsharpBuildless]: {
defaultValue: false,
@@ -132,11 +132,6 @@ exports.featureConfig = {
minimumVersion: undefined,
toolsFeature: tools_features_1.ToolsFeature.PythonDefaultIsToNotExtractStdlib,
},
[Feature.RustAnalysis]: {
defaultValue: false,
envVar: "CODEQL_ACTION_RUST_ANALYSIS",
minimumVersion: "2.19.3",
},
[Feature.QaTelemetryEnabled]: {
defaultValue: false,
envVar: "CODEQL_ACTION_QA_TELEMETRY",

File diff suppressed because one or more lines are too long
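Each featureConfig entry above couples a default value with an environment-variable override, an optional minimum CLI version, and sometimes a tools feature. The actual evaluation lives in Features.getValue, which is not shown in this hunk; the following is only a simplified sketch of the gating those fields imply, with the precedence order assumed for illustration.

// Simplified illustration; not the real Features.getValue from lib/feature-flags.js.
import * as semver from "semver";

interface FeatureSpec {
  defaultValue: boolean;
  envVar: string;
  minimumVersion?: string;
}

function isFeatureEnabledSketch(spec: FeatureSpec, cliVersion: string): boolean {
  // An explicit environment variable wins over everything else.
  const override = process.env[spec.envVar];
  if (override === "true") return true;
  if (override === "false") return false;
  // Features with a minimum version stay off on older CLIs.
  if (spec.minimumVersion !== undefined && semver.lt(cliVersion, spec.minimumVersion)) {
    return false;
  }
  return spec.defaultValue;
}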

lib/git-utils.js generated
View File

@@ -33,7 +33,7 @@ var __importStar = (this && this.__importStar) || (function () {
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.getFileOidsUnderPath = exports.getGitRoot = exports.decodeGitFilePath = exports.gitRepack = exports.gitFetch = exports.deepenGitHistory = exports.determineBaseBranchHeadCommitOid = exports.getCommitOid = exports.runGitCommand = void 0;
exports.decodeGitFilePath = exports.getGitDiffHunkHeaders = exports.getAllGitMergeBases = exports.gitRepack = exports.gitFetch = exports.deepenGitHistory = exports.determineBaseBranchHeadCommitOid = exports.getCommitOid = void 0;
exports.getRef = getRef;
exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
const core = __importStar(require("@actions/core"));
@@ -41,7 +41,7 @@ const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const io = __importStar(require("@actions/io"));
const actions_util_1 = require("./actions-util");
const util_1 = require("./util");
const runGitCommand = async function (workingDirectory, args, customErrorMessage) {
async function runGitCommand(checkoutPath, args, customErrorMessage) {
let stdout = "";
let stderr = "";
core.debug(`Running git command: git ${args.join(" ")}`);
@@ -56,7 +56,7 @@ const runGitCommand = async function (workingDirectory, args, customErrorMessage
stderr += data.toString();
},
},
cwd: workingDirectory,
cwd: checkoutPath,
}).exec();
return stdout;
}
@@ -69,8 +69,7 @@ const runGitCommand = async function (workingDirectory, args, customErrorMessage
core.info(`git call failed. ${customErrorMessage} Error: ${reason}`);
throw error;
}
};
exports.runGitCommand = runGitCommand;
}
/**
* Gets the SHA of the commit that is currently checked out.
*/
@@ -83,7 +82,7 @@ const getCommitOid = async function (checkoutPath, ref = "HEAD") {
// Even if this does go wrong, it's not a huge problem for the alerts to
// reported on the merge commit.
try {
const stdout = await (0, exports.runGitCommand)(checkoutPath, ["rev-parse", ref], "Continuing with commit SHA from user input or environment.");
const stdout = await runGitCommand(checkoutPath, ["rev-parse", ref], "Continuing with commit SHA from user input or environment.");
return stdout.trim();
}
catch {
@@ -107,7 +106,7 @@ const determineBaseBranchHeadCommitOid = async function (checkoutPathOverride) {
let commitOid = "";
let baseOid = "";
let headOid = "";
const stdout = await (0, exports.runGitCommand)(checkoutPath, ["show", "-s", "--format=raw", mergeSha], "Will calculate the base branch SHA on the server.");
const stdout = await runGitCommand(checkoutPath, ["show", "-s", "--format=raw", mergeSha], "Will calculate the base branch SHA on the server.");
for (const data of stdout.split("\n")) {
if (data.startsWith("commit ") && commitOid === "") {
commitOid = data.substring(7);
@@ -142,7 +141,7 @@ exports.determineBaseBranchHeadCommitOid = determineBaseBranchHeadCommitOid;
*/
const deepenGitHistory = async function () {
try {
await (0, exports.runGitCommand)((0, actions_util_1.getOptionalInput)("checkout_path"), [
await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), [
"fetch",
"origin",
"HEAD",
@@ -164,7 +163,7 @@ exports.deepenGitHistory = deepenGitHistory;
*/
const gitFetch = async function (branch, extraFlags) {
try {
await (0, exports.runGitCommand)((0, actions_util_1.getOptionalInput)("checkout_path"), ["fetch", "--no-tags", ...extraFlags, "origin", `${branch}:${branch}`], `Cannot fetch ${branch}.`);
await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), ["fetch", "--no-tags", ...extraFlags, "origin", `${branch}:${branch}`], `Cannot fetch ${branch}.`);
}
catch {
// Errors are already logged by runGitCommand()
@@ -179,13 +178,68 @@ exports.gitFetch = gitFetch;
*/
const gitRepack = async function (flags) {
try {
await (0, exports.runGitCommand)((0, actions_util_1.getOptionalInput)("checkout_path"), ["repack", ...flags], "Cannot repack the repository.");
await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), ["repack", ...flags], "Cannot repack the repository.");
}
catch {
// Errors are already logged by runGitCommand()
}
};
exports.gitRepack = gitRepack;
/**
* Compute the all merge bases between the given refs. Returns an empty array
* if no merge base is found, or if there is an error.
*
* This function uses the `checkout_path` to determine the repository path and
* works only when called from `analyze` or `upload-sarif`.
*/
const getAllGitMergeBases = async function (refs) {
try {
const stdout = await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), ["merge-base", "--all", ...refs], `Cannot get merge base of ${refs}.`);
return stdout.trim().split("\n");
}
catch {
return [];
}
};
exports.getAllGitMergeBases = getAllGitMergeBases;
/**
* Compute the diff hunk headers between the two given refs.
*
* This function uses the `checkout_path` to determine the repository path and
* works only when called from `analyze` or `upload-sarif`.
*
* @returns an array of diff hunk headers (one element per line), or undefined
* if the action was not triggered by a pull request, or if the diff could not
* be determined.
*/
const getGitDiffHunkHeaders = async function (fromRef, toRef) {
let stdout = "";
try {
stdout = await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), [
"-c",
"core.quotePath=false",
"diff",
"--no-renames",
"--irreversible-delete",
"-U0",
fromRef,
toRef,
], `Cannot get diff from ${fromRef} to ${toRef}.`);
}
catch {
return undefined;
}
const headers = [];
for (const line of stdout.split("\n")) {
if (line.startsWith("--- ") ||
line.startsWith("+++ ") ||
line.startsWith("@@ ")) {
headers.push(line);
}
}
return headers;
};
exports.getGitDiffHunkHeaders = getGitDiffHunkHeaders;
/**
* Decode, if necessary, a file path produced by Git. See
* https://git-scm.com/docs/git-config#Documentation/git-config.txt-corequotePath
@@ -231,58 +285,6 @@ const decodeGitFilePath = function (filePath) {
return filePath;
};
exports.decodeGitFilePath = decodeGitFilePath;
/**
* Get the root of the Git repository.
*
* @param sourceRoot The source root of the code being analyzed.
* @returns The root of the Git repository.
*/
const getGitRoot = async function (sourceRoot) {
try {
const stdout = await (0, exports.runGitCommand)(sourceRoot, ["rev-parse", "--show-toplevel"], `Cannot find Git repository root from the source root ${sourceRoot}.`);
return stdout.trim();
}
catch {
// Errors are already logged by runGitCommand()
return undefined;
}
};
exports.getGitRoot = getGitRoot;
/**
* Returns the Git OIDs of all tracked files (in the index and in the working
* tree) that are under the given base path, including files in active
* submodules. Untracked files and files not under the given base path are
* ignored.
*
* @param basePath A path into the Git repository.
* @returns a map from file paths (relative to `basePath`) to Git OIDs.
* @throws {Error} if "git ls-tree" produces unexpected output.
*/
const getFileOidsUnderPath = async function (basePath) {
// Without the --full-name flag, the path is relative to the current working
// directory of the git command, which is basePath.
const stdout = await (0, exports.runGitCommand)(basePath, ["ls-files", "--recurse-submodules", "--format=%(objectname)_%(path)"], "Cannot list Git OIDs of tracked files.");
const fileOidMap = {};
// With --format=%(objectname)_%(path), the output is a list of lines like:
// 30d998ded095371488be3a729eb61d86ed721a18_lib/git-utils.js
// d89514599a9a99f22b4085766d40af7b99974827_lib/git-utils.js.map
const regex = /^([0-9a-f]{40})_(.+)$/;
for (const line of stdout.split("\n")) {
if (line) {
const match = line.match(regex);
if (match) {
const oid = match[1];
const path = (0, exports.decodeGitFilePath)(match[2]);
fileOidMap[path] = oid;
}
else {
throw new Error(`Unexpected "git ls-files" output: ${line}`);
}
}
}
return fileOidMap;
};
exports.getFileOidsUnderPath = getFileOidsUnderPath;
function getRefFromEnv() {
// To workaround a limitation of Actions dynamic workflows not setting
// the GITHUB_REF in some cases, we accept also the ref within the

File diff suppressed because one or more lines are too long
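getGitDiffHunkHeaders above returns only the "---", "+++" and "@@" lines of a -U0 diff; downstream code still has to turn each "@@" header into a line range. Here is a small self-contained sketch of that parsing step (not code from this diff); parseHunkHeader is a hypothetical helper name.

// Hypothetical consumer of the hunk headers; sketch only.
function parseHunkHeader(header: string): { startLine: number; lineCount: number } | undefined {
  // A unified-diff hunk header looks like "@@ -12,0 +34,5 @@"; we want the "+" side.
  const match = header.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/);
  if (match === null) {
    return undefined;
  }
  return {
    startLine: parseInt(match[1], 10),
    lineCount: match[2] === undefined ? 1 : parseInt(match[2], 10),
  };
}

// parseHunkHeader("@@ -10,3 +20,7 @@") => { startLine: 20, lineCount: 7 }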

lib/git-utils.test.js generated
View File

@@ -265,75 +265,4 @@ const util_1 = require("./util");
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\vbar"'), "foo\vbar");
t.deepEqual(gitUtils.decodeGitFilePath('"\\a\\b\\f\\n\\r\\t\\v"'), "\x07\b\f\n\r\t\v");
});
(0, ava_1.default)("getFileOidsUnderPath returns correct file mapping", async (t) => {
const runGitCommandStub = sinon
.stub(gitUtils, "runGitCommand")
.resolves("30d998ded095371488be3a729eb61d86ed721a18_lib/git-utils.js\n" +
"d89514599a9a99f22b4085766d40af7b99974827_lib/git-utils.js.map\n" +
"a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_src/git-utils.ts");
try {
const result = await gitUtils.getFileOidsUnderPath("/fake/path");
t.deepEqual(result, {
"lib/git-utils.js": "30d998ded095371488be3a729eb61d86ed721a18",
"lib/git-utils.js.map": "d89514599a9a99f22b4085766d40af7b99974827",
"src/git-utils.ts": "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96",
});
t.deepEqual(runGitCommandStub.firstCall.args, [
"/fake/path",
["ls-files", "--recurse-submodules", "--format=%(objectname)_%(path)"],
"Cannot list Git OIDs of tracked files.",
]);
}
finally {
runGitCommandStub.restore();
}
});
(0, ava_1.default)("getFileOidsUnderPath handles quoted paths", async (t) => {
const runGitCommandStub = sinon
.stub(gitUtils, "runGitCommand")
.resolves("30d998ded095371488be3a729eb61d86ed721a18_lib/normal-file.js\n" +
'd89514599a9a99f22b4085766d40af7b99974827_"lib/file with spaces.js"\n' +
'a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_"lib/file\\twith\\ttabs.js"');
try {
const result = await gitUtils.getFileOidsUnderPath("/fake/path");
t.deepEqual(result, {
"lib/normal-file.js": "30d998ded095371488be3a729eb61d86ed721a18",
"lib/file with spaces.js": "d89514599a9a99f22b4085766d40af7b99974827",
"lib/file\twith\ttabs.js": "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96",
});
}
finally {
runGitCommandStub.restore();
}
});
(0, ava_1.default)("getFileOidsUnderPath handles empty output", async (t) => {
const runGitCommandStub = sinon
.stub(gitUtils, "runGitCommand")
.resolves("");
try {
const result = await gitUtils.getFileOidsUnderPath("/fake/path");
t.deepEqual(result, {});
}
finally {
runGitCommandStub.restore();
}
});
(0, ava_1.default)("getFileOidsUnderPath throws on unexpected output format", async (t) => {
const runGitCommandStub = sinon
.stub(gitUtils, "runGitCommand")
.resolves("30d998ded095371488be3a729eb61d86ed721a18_lib/git-utils.js\n" +
"invalid-line-format\n" +
"a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_src/git-utils.ts");
try {
await t.throwsAsync(async () => {
await gitUtils.getFileOidsUnderPath("/fake/path");
}, {
instanceOf: Error,
message: 'Unexpected "git ls-files" output: invalid-line-format',
});
}
finally {
runGitCommandStub.restore();
}
});
//# sourceMappingURL=git-utils.test.js.map

File diff suppressed because one or more lines are too long

View File

@@ -173,7 +173,7 @@ async function removeUploadedSarif(uploadFailedSarifResult, logger) {
logger.info(`In test mode, therefore deleting the failed analysis to avoid impacting tool status for the Action repository. SARIF ID to delete: ${sarifID}.`);
const client = (0, api_client_1.getApiClient)();
try {
const repositoryNwo = (0, repository_1.getRepositoryNwo)();
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
// Wait to make sure the analysis is ready for download before requesting it.
await (0, util_1.delay)(5000);
// Get the analysis associated with the uploaded sarif

File diff suppressed because one or more lines are too long

View File

@@ -59,15 +59,14 @@ async function runWrapper() {
(0, actions_util_1.restoreInputs)();
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
const repositoryNwo = (0, repository_1.getRepositoryNwo)();
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
config = await (0, config_utils_1.getConfig)((0, actions_util_1.getTemporaryDirectory)(), logger);
if (config === undefined) {
logger.warning("Debugging artifacts are unavailable since the 'init' Action failed before it could produce any.");
return;
}
else {
uploadFailedSarifResult = await initActionPostHelper.run(debugArtifacts.tryUploadAllAvailableDebugArtifacts, actions_util_1.printDebugLogs, config, repositoryNwo, features, logger);
}
uploadFailedSarifResult = await initActionPostHelper.run(debugArtifacts.tryUploadAllAvailableDebugArtifacts, actions_util_1.printDebugLogs, config, repositoryNwo, features, logger);
}
catch (unwrappedError) {
const error = (0, util_1.wrapError)(unwrappedError);
@@ -87,9 +86,7 @@ async function runWrapper() {
...uploadFailedSarifResult,
job_status: initActionPostHelper.getFinalJobStatus(),
};
logger.info("Sending status report for init-post step.");
await (0, status_report_1.sendStatusReport)(statusReport);
logger.info("Status report sent for init-post step.");
}
}
void runWrapper();
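Both post-step entry points above switch from getRepositoryNwo() to parseRepositoryNwo(getRequiredEnvParam("GITHUB_REPOSITORY")), i.e. the repository is read back out of the GITHUB_REPOSITORY environment variable in "owner/name" form. The real parser lives in lib/repository.js and is not shown here; the sketch below only illustrates the split it implies, and parseRepositoryNwoSketch is a hypothetical name.

// Hypothetical re-creation of the owner/name split; not the real parseRepositoryNwo.
function parseRepositoryNwoSketch(nwo: string): { owner: string; repo: string } {
  const parts = nwo.split("/");
  if (parts.length !== 2 || parts[0] === "" || parts[1] === "") {
    throw new Error(`"${nwo}" is not a valid repository name (expected "owner/name")`);
  }
  return { owner: parts[0], repo: parts[1] };
}

// parseRepositoryNwoSketch(process.env.GITHUB_REPOSITORY ?? "octo-org/octo-repo")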

View File

File diff suppressed because one or more lines are too long (lib/init-action-post.js.map)

lib/init-action.js generated
View File

@@ -37,7 +37,6 @@ const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const io = __importStar(require("@actions/io"));
const semver = __importStar(require("semver"));
const uuid_1 = require("uuid");
const actions_util_1 = require("./actions-util");
const api_client_1 = require("./api-client");
@@ -50,7 +49,6 @@ const feature_flags_1 = require("./feature-flags");
const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const overlay_database_utils_1 = require("./overlay-database-utils");
const repository_1 = require("./repository");
const setup_codeql_1 = require("./setup-codeql");
const status_report_1 = require("./status-report");
@@ -160,7 +158,7 @@ async function run() {
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
(0, util_1.checkActionVersion)((0, actions_util_1.getActionVersion)(), gitHubVersion);
const repositoryNwo = (0, repository_1.getRepositoryNwo)();
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
const jobRunUuid = (0, uuid_1.v4)();
logger.info(`Job run UUID is ${jobRunUuid}.`);
@@ -229,12 +227,7 @@ async function run() {
return;
}
try {
const sourceRoot = path.resolve((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), (0, actions_util_1.getOptionalInput)("source-root") || "");
const overlayDatabaseMode = await (0, init_1.getOverlayDatabaseMode)((await codeql.getVersion()).version, config, sourceRoot, logger);
logger.info(`Using overlay database mode: ${overlayDatabaseMode}`);
if (overlayDatabaseMode !== overlay_database_utils_1.OverlayDatabaseMode.Overlay) {
(0, init_1.cleanupDatabaseClusterDirectory)(config, logger);
}
(0, init_1.cleanupDatabaseClusterDirectory)(config, logger);
if (zstdAvailability) {
await recordZstdAvailability(config, zstdAvailability);
}
@@ -324,11 +317,6 @@ async function run() {
if (await features.getValue(feature_flags_1.Feature.DisableKotlinAnalysisEnabled)) {
core.exportVariable("CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN", "true");
}
const kotlinLimitVar = "CODEQL_EXTRACTOR_KOTLIN_OVERRIDE_MAXIMUM_VERSION_LIMIT";
if ((await (0, util_1.codeQlVersionAtLeast)(codeql, "2.20.3")) &&
!(await (0, util_1.codeQlVersionAtLeast)(codeql, "2.20.4"))) {
core.exportVariable(kotlinLimitVar, "2.1.20");
}
if (config.languages.includes(languages_1.Language.cpp)) {
const envVar = "CODEQL_EXTRACTOR_CPP_TRAP_CACHING";
if (process.env[envVar]) {
@@ -352,26 +340,6 @@ async function run() {
logger.info(`Setting C++ build-mode: none to ${value}`);
core.exportVariable(bmnVar, value);
}
// Set CODEQL_ENABLE_EXPERIMENTAL_FEATURES for rust
if (config.languages.includes(languages_1.Language.rust)) {
const feat = feature_flags_1.Feature.RustAnalysis;
const minVer = feature_flags_1.featureConfig[feat].minimumVersion;
const envVar = "CODEQL_ENABLE_EXPERIMENTAL_FEATURES";
// if in default setup, it means the feature flag was on when rust was enabled
// if the feature flag gets turned off, let's not have rust analysis throwing a configuration error
// in that case rust analysis will be disabled only when default setup is refreshed
if ((0, actions_util_1.isDefaultSetup)() || (await features.getValue(feat, codeql))) {
core.exportVariable(envVar, "true");
}
if (process.env[envVar] !== "true") {
throw new util_1.ConfigurationError(`Experimental and not officially supported Rust analysis requires setting ${envVar}=true in the environment`);
}
const actualVer = (await codeql.getVersion()).version;
if (semver.lt(actualVer, minVer)) {
throw new util_1.ConfigurationError(`Experimental rust analysis is supported by CodeQL CLI version ${minVer} or higher, but found version ${actualVer}`);
}
logger.info("Experimental rust analysis enabled");
}
// Restore dependency cache(s), if they exist.
if ((0, caching_utils_1.shouldRestoreCache)(config.dependencyCachingEnabled)) {
await (0, dependency_caching_1.downloadDependencyCaches)(config.languages, logger);
@@ -414,7 +382,8 @@ async function run() {
core.exportVariable("CODEQL_EXTRACTOR_PYTHON_EXTRACT_STDLIB", "true");
}
}
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", (0, actions_util_1.getOptionalInput)("registries"), apiDetails, overlayDatabaseMode, logger);
const sourceRoot = path.resolve((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), (0, actions_util_1.getOptionalInput)("source-root") || "");
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", (0, actions_util_1.getOptionalInput)("registries"), apiDetails, logger);
if (tracerConfig !== undefined) {
for (const [key, value] of Object.entries(tracerConfig.env)) {
core.exportVariable(key, value);

File diff suppressed because one or more lines are too long
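Among the blocks removed from init-action.js above is a narrow Kotlin workaround: only CLI releases at least 2.20.3 but below 2.20.4 get CODEQL_EXTRACTOR_KOTLIN_OVERRIDE_MAXIMUM_VERSION_LIMIT set to "2.1.20". A standalone sketch of that version window, using the semver package that the removed code imports; kotlinOverrideFor is a hypothetical helper name.

// Sketch of the removed version gate; kotlinOverrideFor is hypothetical.
import * as semver from "semver";

function kotlinOverrideFor(cliVersion: string): string | undefined {
  // Only CLI 2.20.3 (>= 2.20.3 and < 2.20.4) needs the Kotlin version-limit override.
  if (semver.gte(cliVersion, "2.20.3") && semver.lt(cliVersion, "2.20.4")) {
    return "2.1.20";
  }
  return undefined;
}

// kotlinOverrideFor("2.20.3") => "2.1.20"; kotlinOverrideFor("2.20.4") => undefined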

lib/init.js generated
View File

@@ -35,7 +35,6 @@ var __importStar = (this && this.__importStar) || (function () {
Object.defineProperty(exports, "__esModule", { value: true });
exports.initCodeQL = initCodeQL;
exports.initConfig = initConfig;
exports.getOverlayDatabaseMode = getOverlayDatabaseMode;
exports.runInit = runInit;
exports.printPathFiltersWarning = printPathFiltersWarning;
exports.checkInstallPython311 = checkInstallPython311;
@@ -44,13 +43,10 @@ const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const io = __importStar(require("@actions/io"));
const semver = __importStar(require("semver"));
const actions_util_1 = require("./actions-util");
const codeql_1 = require("./codeql");
const configUtils = __importStar(require("./config-utils"));
const git_utils_1 = require("./git-utils");
const languages_1 = require("./languages");
const overlay_database_utils_1 = require("./overlay-database-utils");
const tools_features_1 = require("./tools-features");
const tracer_config_1 = require("./tracer-config");
const util = __importStar(require("./util"));
@@ -77,33 +73,7 @@ async function initConfig(inputs, codeql) {
logger.endGroup();
return config;
}
async function getOverlayDatabaseMode(codeqlVersion, config, sourceRoot, logger) {
const overlayDatabaseMode = process.env.CODEQL_OVERLAY_DATABASE_MODE;
if (overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.Overlay ||
overlayDatabaseMode === overlay_database_utils_1.OverlayDatabaseMode.OverlayBase) {
if (config.buildMode !== util.BuildMode.None) {
logger.warning(`Cannot build an ${overlayDatabaseMode} database because ` +
`build-mode is set to "${config.buildMode}" instead of "none". ` +
"Falling back to creating a normal full database instead.");
return overlay_database_utils_1.OverlayDatabaseMode.None;
}
if (semver.lt(codeqlVersion, overlay_database_utils_1.CODEQL_OVERLAY_MINIMUM_VERSION)) {
logger.warning(`Cannot build an ${overlayDatabaseMode} database because ` +
`the CodeQL CLI is older than ${overlay_database_utils_1.CODEQL_OVERLAY_MINIMUM_VERSION}. ` +
"Falling back to creating a normal full database instead.");
return overlay_database_utils_1.OverlayDatabaseMode.None;
}
if ((await (0, git_utils_1.getGitRoot)(sourceRoot)) === undefined) {
logger.warning(`Cannot build an ${overlayDatabaseMode} database because ` +
`the source root "${sourceRoot}" is not inside a git repository. ` +
"Falling back to creating a normal full database instead.");
return overlay_database_utils_1.OverlayDatabaseMode.None;
}
return overlayDatabaseMode;
}
return overlay_database_utils_1.OverlayDatabaseMode.None;
}
async function runInit(codeql, config, sourceRoot, processName, registriesInput, apiDetails, overlayDatabaseMode, logger) {
async function runInit(codeql, config, sourceRoot, processName, registriesInput, apiDetails, logger) {
fs.mkdirSync(config.dbLocation, { recursive: true });
const { registriesAuthTokens, qlconfigFile } = await configUtils.generateRegistries(registriesInput, config.tempDir, logger);
await configUtils.wrapEnvironment({
@@ -111,7 +81,7 @@ async function runInit(codeql, config, sourceRoot, processName, registriesInput,
CODEQL_REGISTRIES_AUTH: registriesAuthTokens,
},
// Init a database cluster
async () => await codeql.databaseInitCluster(config, sourceRoot, processName, qlconfigFile, overlayDatabaseMode, logger));
async () => await codeql.databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger));
return await (0, tracer_config_1.getCombinedTracerConfig)(codeql, config);
}
function printPathFiltersWarning(config, logger) {

View File

File diff suppressed because one or more lines are too long (lib/init.js.map)

View File

@@ -1,129 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.CODEQL_OVERLAY_MINIMUM_VERSION = exports.OverlayDatabaseMode = void 0;
exports.writeBaseDatabaseOidsFile = writeBaseDatabaseOidsFile;
exports.writeOverlayChangesFile = writeOverlayChangesFile;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const actions_util_1 = require("./actions-util");
const git_utils_1 = require("./git-utils");
var OverlayDatabaseMode;
(function (OverlayDatabaseMode) {
OverlayDatabaseMode["Overlay"] = "overlay";
OverlayDatabaseMode["OverlayBase"] = "overlay-base";
OverlayDatabaseMode["None"] = "none";
})(OverlayDatabaseMode || (exports.OverlayDatabaseMode = OverlayDatabaseMode = {}));
exports.CODEQL_OVERLAY_MINIMUM_VERSION = "2.20.5";
/**
* Writes a JSON file containing Git OIDs for all tracked files (represented
* by path relative to the source root) under the source root. The file is
* written into the database location specified in the config.
*
* @param config The configuration object containing the database location
* @param sourceRoot The root directory containing the source files to process
* @throws {Error} If the Git repository root cannot be determined
*/
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
const gitFileOids = await (0, git_utils_1.getFileOidsUnderPath)(sourceRoot);
const gitFileOidsJson = JSON.stringify(gitFileOids);
const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
await fs.promises.writeFile(baseDatabaseOidsFilePath, gitFileOidsJson);
}
/**
* Reads and parses the JSON file containing the base database Git OIDs.
* This file contains the mapping of file paths to their corresponding Git OIDs
* that was previously written by writeBaseDatabaseOidsFile().
*
* @param config The configuration object containing the database location
* @param logger The logger instance to use for error reporting
* @returns An object mapping file paths (relative to source root) to their Git OIDs
* @throws {Error} If the file cannot be read or parsed
*/
async function readBaseDatabaseOidsFile(config, logger) {
const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
try {
const contents = await fs.promises.readFile(baseDatabaseOidsFilePath, "utf-8");
return JSON.parse(contents);
}
catch (e) {
logger.error("Failed to read overlay-base file OIDs from " +
`${baseDatabaseOidsFilePath}: ${e.message || e}`);
throw e;
}
}
function getBaseDatabaseOidsFilePath(config) {
return path.join(config.dbLocation, "base-database-oids.json");
}
/**
* Writes a JSON file containing the source-root-relative paths of files under
* `sourceRoot` that have changed (added, removed, or modified) from the overlay
* base database.
*
* This function uses the Git index to determine which files have changed, so it
* requires the following preconditions, both when this function is called and
* when the overlay-base database was initialized:
*
* - It requires that `sourceRoot` is inside a Git repository.
* - It assumes that all changes in the working tree are staged in the index.
* - It assumes that all files of interest are tracked by Git, e.g. not covered
* by `.gitignore`.
*/
async function writeOverlayChangesFile(config, sourceRoot, logger) {
const baseFileOids = await readBaseDatabaseOidsFile(config, logger);
const overlayFileOids = await (0, git_utils_1.getFileOidsUnderPath)(sourceRoot);
const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids);
logger.info(`Found ${changedFiles.length} changed file(s) under ${sourceRoot}.`);
const changedFilesJson = JSON.stringify({ changes: changedFiles });
const overlayChangesFile = path.join((0, actions_util_1.getTemporaryDirectory)(), "overlay-changes.json");
logger.debug(`Writing overlay changed files to ${overlayChangesFile}: ${changedFilesJson}`);
await fs.promises.writeFile(overlayChangesFile, changedFilesJson);
return overlayChangesFile;
}
function computeChangedFiles(baseFileOids, overlayFileOids) {
const changes = [];
for (const [file, oid] of Object.entries(overlayFileOids)) {
if (!(file in baseFileOids) || baseFileOids[file] !== oid) {
changes.push(file);
}
}
for (const file of Object.keys(baseFileOids)) {
if (!(file in overlayFileOids)) {
changes.push(file);
}
}
return changes;
}
//# sourceMappingURL=overlay-database-utils.js.map
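The overlay change computation above boils down to diffing two path-to-OID maps. A minimal standalone sketch in TypeScript (the FileOidMap name and the sample data are illustrative, not taken from the repository):

type FileOidMap = Record<string, string>;

function computeChangedFiles(baseFileOids: FileOidMap, overlayFileOids: FileOidMap): string[] {
  const changes: string[] = [];
  // Added or modified: present in the overlay with a missing or different OID in the base.
  for (const [file, oid] of Object.entries(overlayFileOids)) {
    if (!(file in baseFileOids) || baseFileOids[file] !== oid) {
      changes.push(file);
    }
  }
  // Deleted: present in the base snapshot but absent from the overlay.
  for (const file of Object.keys(baseFileOids)) {
    if (!(file in overlayFileOids)) {
      changes.push(file);
    }
  }
  return changes;
}

const base: FileOidMap = { "unchanged.js": "aaa111", "modified.js": "bbb222", "deleted.js": "ccc333" };
const overlay: FileOidMap = { "unchanged.js": "aaa111", "modified.js": "ddd444", "added.js": "eee555" };
console.log(computeChangedFiles(base, overlay)); // [ 'modified.js', 'added.js', 'deleted.js' ]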

View File

@@ -1 +0,0 @@
{"version":3,"file":"overlay-database-utils.js","sourceRoot":"","sources":["../src/overlay-database-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAyBA,8DAQC;AAkDD,0DAsBC;AAzGD,uCAAyB;AACzB,2CAA6B;AAE7B,iDAAuD;AAEvD,2CAAmD;AAGnD,IAAY,mBAIX;AAJD,WAAY,mBAAmB;IAC7B,0CAAmB,CAAA;IACnB,mDAA4B,CAAA;IAC5B,oCAAa,CAAA;AACf,CAAC,EAJW,mBAAmB,mCAAnB,mBAAmB,QAI9B;AAEY,QAAA,8BAA8B,GAAG,QAAQ,CAAC;AAEvD;;;;;;;;GAQG;AACI,KAAK,UAAU,yBAAyB,CAC7C,MAAc,EACd,UAAkB;IAElB,MAAM,WAAW,GAAG,MAAM,IAAA,gCAAoB,EAAC,UAAU,CAAC,CAAC;IAC3D,MAAM,eAAe,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;IACpD,MAAM,wBAAwB,GAAG,2BAA2B,CAAC,MAAM,CAAC,CAAC;IACrE,MAAM,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,wBAAwB,EAAE,eAAe,CAAC,CAAC;AACzE,CAAC;AAED;;;;;;;;;GASG;AACH,KAAK,UAAU,wBAAwB,CACrC,MAAc,EACd,MAAc;IAEd,MAAM,wBAAwB,GAAG,2BAA2B,CAAC,MAAM,CAAC,CAAC;IACrE,IAAI,CAAC;QACH,MAAM,QAAQ,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,QAAQ,CACzC,wBAAwB,EACxB,OAAO,CACR,CAAC;QACF,OAAO,IAAI,CAAC,KAAK,CAAC,QAAQ,CAA8B,CAAC;IAC3D,CAAC;IAAC,OAAO,CAAC,EAAE,CAAC;QACX,MAAM,CAAC,KAAK,CACV,6CAA6C;YAC3C,GAAG,wBAAwB,KAAM,CAAS,CAAC,OAAO,IAAI,CAAC,EAAE,CAC5D,CAAC;QACF,MAAM,CAAC,CAAC;IACV,CAAC;AACH,CAAC;AAED,SAAS,2BAA2B,CAAC,MAAc;IACjD,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,UAAU,EAAE,yBAAyB,CAAC,CAAC;AACjE,CAAC;AAED;;;;;;;;;;;;;GAaG;AACI,KAAK,UAAU,uBAAuB,CAC3C,MAAc,EACd,UAAkB,EAClB,MAAc;IAEd,MAAM,YAAY,GAAG,MAAM,wBAAwB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACpE,MAAM,eAAe,GAAG,MAAM,IAAA,gCAAoB,EAAC,UAAU,CAAC,CAAC;IAC/D,MAAM,YAAY,GAAG,mBAAmB,CAAC,YAAY,EAAE,eAAe,CAAC,CAAC;IACxE,MAAM,CAAC,IAAI,CACT,SAAS,YAAY,CAAC,MAAM,0BAA0B,UAAU,GAAG,CACpE,CAAC;IAEF,MAAM,gBAAgB,GAAG,IAAI,CAAC,SAAS,CAAC,EAAE,OAAO,EAAE,YAAY,EAAE,CAAC,CAAC;IACnE,MAAM,kBAAkB,GAAG,IAAI,CAAC,IAAI,CAClC,IAAA,oCAAqB,GAAE,EACvB,sBAAsB,CACvB,CAAC;IACF,MAAM,CAAC,KAAK,CACV,oCAAoC,kBAAkB,KAAK,gBAAgB,EAAE,CAC9E,CAAC;IACF,MAAM,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;IAClE,OAAO,kBAAkB,CAAC;AAC5B,CAAC;AAED,SAAS,mBAAmB,CAC1B,YAAuC,EACvC,eAA0C;IAE1C,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,KAAK,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,eAAe,CAAC,EAAE,CAAC;QAC1D,IAAI,CAAC,CAAC,IAAI,IAAI,YAAY,CAAC,IAAI,YAAY,CAAC,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;YAC1D,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACrB,CAAC;IACH,CAAC;IACD,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE,CAAC;QAC7C,IAAI,CAAC,CAAC,IAAI,IAAI,eAAe,CAAC,EAAE,CAAC;YAC/B,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACrB,CAAC;IACH,CAAC;IACD,OAAO,OAAO,CAAC;AACjB,CAAC"}

View File

@@ -1,94 +0,0 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const gitUtils = __importStar(require("./git-utils"));
const logging_1 = require("./logging");
const overlay_database_utils_1 = require("./overlay-database-utils");
const testing_utils_1 = require("./testing-utils");
const util_1 = require("./util");
(0, testing_utils_1.setupTests)(ava_1.default);
(0, ava_1.default)("writeOverlayChangesFile generates correct changes file", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
const dbLocation = path.join(tmpDir, "db");
await fs.promises.mkdir(dbLocation, { recursive: true });
const sourceRoot = path.join(tmpDir, "src");
await fs.promises.mkdir(sourceRoot, { recursive: true });
const tempDir = path.join(tmpDir, "temp");
await fs.promises.mkdir(tempDir, { recursive: true });
const logger = (0, logging_1.getRunnerLogger)(true);
const config = (0, testing_utils_1.createTestConfig)({ dbLocation });
// Mock the getFileOidsUnderPath function to return base OIDs
const baseOids = {
"unchanged.js": "aaa111",
"modified.js": "bbb222",
"deleted.js": "ccc333",
};
const getFileOidsStubForBase = sinon
.stub(gitUtils, "getFileOidsUnderPath")
.resolves(baseOids);
// Write the base database OIDs file
await (0, overlay_database_utils_1.writeBaseDatabaseOidsFile)(config, sourceRoot);
getFileOidsStubForBase.restore();
// Mock the getFileOidsUnderPath function to return overlay OIDs
const currentOids = {
"unchanged.js": "aaa111",
"modified.js": "ddd444", // Changed OID
"added.js": "eee555", // New file
};
const getFileOidsStubForOverlay = sinon
.stub(gitUtils, "getFileOidsUnderPath")
.resolves(currentOids);
// Write the overlay changes file, which uses the mocked overlay OIDs
// and the base database OIDs file
const getTempDirStub = sinon
.stub(actionsUtil, "getTemporaryDirectory")
.returns(tempDir);
const changesFilePath = await (0, overlay_database_utils_1.writeOverlayChangesFile)(config, sourceRoot, logger);
getFileOidsStubForOverlay.restore();
getTempDirStub.restore();
const fileContent = await fs.promises.readFile(changesFilePath, "utf-8");
const parsedContent = JSON.parse(fileContent);
t.deepEqual(parsedContent.changes.sort(), ["added.js", "deleted.js", "modified.js"], "Should identify added, deleted, and modified files");
});
});
//# sourceMappingURL=overlay-database-utils.test.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"overlay-database-utils.test.js","sourceRoot":"","sources":["../src/overlay-database-utils.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,sDAAwC;AACxC,uCAA4C;AAC5C,qEAGkC;AAClC,mDAA+D;AAC/D,iCAAoC;AAEpC,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,wDAAwD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACzE,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAChC,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;QAC3C,MAAM,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QACzD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;QAC5C,MAAM,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QACzD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAC1C,MAAM,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,OAAO,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAEtD,MAAM,MAAM,GAAG,IAAA,yBAAe,EAAC,IAAI,CAAC,CAAC;QACrC,MAAM,MAAM,GAAG,IAAA,gCAAgB,EAAC,EAAE,UAAU,EAAE,CAAC,CAAC;QAEhD,6DAA6D;QAC7D,MAAM,QAAQ,GAAG;YACf,cAAc,EAAE,QAAQ;YACxB,aAAa,EAAE,QAAQ;YACvB,YAAY,EAAE,QAAQ;SACvB,CAAC;QACF,MAAM,sBAAsB,GAAG,KAAK;aACjC,IAAI,CAAC,QAAQ,EAAE,sBAAsB,CAAC;aACtC,QAAQ,CAAC,QAAQ,CAAC,CAAC;QAEtB,oCAAoC;QACpC,MAAM,IAAA,kDAAyB,EAAC,MAAM,EAAE,UAAU,CAAC,CAAC;QACpD,sBAAsB,CAAC,OAAO,EAAE,CAAC;QAEjC,gEAAgE;QAChE,MAAM,WAAW,GAAG;YAClB,cAAc,EAAE,QAAQ;YACxB,aAAa,EAAE,QAAQ,EAAE,cAAc;YACvC,UAAU,EAAE,QAAQ,EAAE,WAAW;SAClC,CAAC;QACF,MAAM,yBAAyB,GAAG,KAAK;aACpC,IAAI,CAAC,QAAQ,EAAE,sBAAsB,CAAC;aACtC,QAAQ,CAAC,WAAW,CAAC,CAAC;QAEzB,qEAAqE;QACrE,kCAAkC;QAClC,MAAM,cAAc,GAAG,KAAK;aACzB,IAAI,CAAC,WAAW,EAAE,uBAAuB,CAAC;aAC1C,OAAO,CAAC,OAAO,CAAC,CAAC;QACpB,MAAM,eAAe,GAAG,MAAM,IAAA,gDAAuB,EACnD,MAAM,EACN,UAAU,EACV,MAAM,CACP,CAAC;QACF,yBAAyB,CAAC,OAAO,EAAE,CAAC;QACpC,cAAc,CAAC,OAAO,EAAE,CAAC;QAEzB,MAAM,WAAW,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,eAAe,EAAE,OAAO,CAAC,CAAC;QACzE,MAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,WAAW,CAA0B,CAAC;QAEvE,CAAC,CAAC,SAAS,CACT,aAAa,CAAC,OAAO,CAAC,IAAI,EAAE,EAC5B,CAAC,UAAU,EAAE,YAAY,EAAE,aAAa,CAAC,EACzC,oDAAoD,CACrD,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

26
lib/repository.js generated
View File

@@ -1,33 +1,7 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getRepositoryNwo = getRepositoryNwo;
exports.getRepositoryNwoFromEnv = getRepositoryNwoFromEnv;
exports.parseRepositoryNwo = parseRepositoryNwo;
const util_1 = require("./util");
/**
* Get the repository name with owner from the environment variable
* `GITHUB_REPOSITORY`.
*
* @returns The repository name with owner.
*/
function getRepositoryNwo() {
return getRepositoryNwoFromEnv("GITHUB_REPOSITORY");
}
/**
* Get the repository name with owner from the first environment variable that
* is set and non-empty.
*
* @param envVarNames The names of the environment variables to check.
* @returns The repository name with owner.
* @throws ConfigurationError if none of the environment variables are set.
*/
function getRepositoryNwoFromEnv(...envVarNames) {
const envVarName = envVarNames.find((name) => process.env[name]);
if (!envVarName) {
throw new util_1.ConfigurationError(`None of the env vars ${envVarNames.join(", ")} are set`);
}
return parseRepositoryNwo((0, util_1.getRequiredEnvParam)(envVarName));
}
function parseRepositoryNwo(input) {
const parts = input.split("/");
if (parts.length !== 2) {

View File

@@ -1 +1 @@
{"version":3,"file":"repository.js","sourceRoot":"","sources":["../src/repository.ts"],"names":[],"mappings":";;AAcA,4CAEC;AAUD,0DAUC;AAED,gDASC;AA/CD,iCAAiE;AAQjE;;;;;GAKG;AACH,SAAgB,gBAAgB;IAC9B,OAAO,uBAAuB,CAAC,mBAAmB,CAAC,CAAC;AACtD,CAAC;AAED;;;;;;;GAOG;AACH,SAAgB,uBAAuB,CACrC,GAAG,WAAqB;IAExB,MAAM,UAAU,GAAG,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC;IACjE,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,MAAM,IAAI,yBAAkB,CAC1B,wBAAwB,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CACzD,CAAC;IACJ,CAAC;IACD,OAAO,kBAAkB,CAAC,IAAA,0BAAmB,EAAC,UAAU,CAAC,CAAC,CAAC;AAC7D,CAAC;AAED,SAAgB,kBAAkB,CAAC,KAAa;IAC9C,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACvB,MAAM,IAAI,yBAAkB,CAAC,IAAI,KAAK,kCAAkC,CAAC,CAAC;IAC5E,CAAC;IACD,OAAO;QACL,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC;QACf,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC;KACf,CAAC;AACJ,CAAC"}
{"version":3,"file":"repository.js","sourceRoot":"","sources":["../src/repository.ts"],"names":[],"mappings":";;AAQA,gDASC;AAjBD,iCAA4C;AAQ5C,SAAgB,kBAAkB,CAAC,KAAa;IAC9C,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACvB,MAAM,IAAI,yBAAkB,CAAC,IAAI,KAAK,kCAAkC,CAAC,CAAC;IAC5E,CAAC;IACD,OAAO;QACL,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC;QACf,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC;KACf,CAAC;AACJ,CAAC"}

View File

@@ -53,12 +53,7 @@ async function runWrapper() {
// Kill the running proxy
const pid = core.getState("proxy-process-pid");
if (pid) {
try {
process.kill(Number(pid));
}
catch (error) {
logger.error(`Failed to kill proxy process: ${(0, util_1.getErrorMessage)(error)}`);
}
process.kill(Number(pid));
}
const config = await configUtils.getConfig(actionsUtil.getTemporaryDirectory(), logger);
if ((config && config.debugMode) || core.isDebug()) {

View File

@@ -1 +1 @@
{"version":3,"file":"start-proxy-action-post.js","sourceRoot":"","sources":["../src/start-proxy-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,4DAA8C;AAC9C,uDAA8D;AAC9D,uCAA6C;AAC7C,iCAAoE;AAEpE,KAAK,UAAU,UAAU;IACvB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,IAAI,CAAC;QACH,4CAA4C;QAC5C,WAAW,CAAC,aAAa,EAAE,CAAC;QAE5B,yBAAyB;QACzB,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CAAC;QAC/C,IAAI,GAAG,EAAE,CAAC;YACR,IAAI,CAAC;gBACH,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;YAC5B,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBACf,MAAM,CAAC,KAAK,CAAC,iCAAiC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAAC,CAAC;YAC1E,CAAC;QACH,CAAC;QAED,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CACxC,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;QAEF,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE,CAAC;YACnD,MAAM,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAC;YACpD,MAAM,CAAC,IAAI,CACT,wEAAwE,CACzE,CAAC;YACF,IAAI,MAAM,EAAE,aAAa,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC7C,MAAM,CAAC,OAAO,CACZ,qFAAqF,CACtF,CAAC;gBACF,OAAO;YACT,CAAC;YACD,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;YAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;YAEjD,MAAM,gBAAgB,GAAG,MAAM,IAAA,2CAAyB,EACtD,MAAM,EACN,aAAa,CAAC,IAAI,CACnB,CAAC;YAEF,MAAM,gBAAgB,CAAC,cAAc,CACnC,gBAAgB,EAChB,CAAC,WAAW,CAAC,EACb,WAAW,CAAC,qBAAqB,EAAE,EACnC;gBACE,wFAAwF;gBACxF,aAAa,EAAE,CAAC;aACjB,CACF,CAAC;QACJ,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,gEAAgE;QAChE,MAAM,CAAC,OAAO,CACZ,wCAAwC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CACjE,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"start-proxy-action-post.js","sourceRoot":"","sources":["../src/start-proxy-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,4DAA8C;AAC9C,uDAA8D;AAC9D,uCAA6C;AAC7C,iCAAoE;AAEpE,KAAK,UAAU,UAAU;IACvB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,IAAI,CAAC;QACH,4CAA4C;QAC5C,WAAW,CAAC,aAAa,EAAE,CAAC;QAE5B,yBAAyB;QACzB,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CAAC;QAC/C,IAAI,GAAG,EAAE,CAAC;YACR,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;QAC5B,CAAC;QAED,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CACxC,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;QAEF,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE,CAAC;YACnD,MAAM,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAC;YACpD,MAAM,CAAC,IAAI,CACT,wEAAwE,CACzE,CAAC;YACF,IAAI,MAAM,EAAE,aAAa,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC7C,MAAM,CAAC,OAAO,CACZ,qFAAqF,CACtF,CAAC;gBACF,OAAO;YACT,CAAC;YACD,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;YAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;YAEjD,MAAM,gBAAgB,GAAG,MAAM,IAAA,2CAAyB,EACtD,MAAM,EACN,aAAa,CAAC,IAAI,CACnB,CAAC;YAEF,MAAM,gBAAgB,CAAC,cAAc,CACnC,gBAAgB,EAChB,CAAC,WAAW,CAAC,EACb,WAAW,CAAC,qBAAqB,EAAE,EACnC;gBACE,wFAAwF;gBACxF,aAAa,EAAE,CAAC;aACjB,CACF,CAAC;QACJ,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,gEAAgE;QAChE,MAAM,CAAC,OAAO,CACZ,wCAAwC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CACjE,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

View File

@@ -43,8 +43,8 @@ const logging_1 = require("./logging");
const start_proxy_1 = require("./start-proxy");
const util = __importStar(require("./util"));
const UPDATEJOB_PROXY = "update-job-proxy";
const UPDATEJOB_PROXY_VERSION = "v2.0.20250424171100";
const UPDATEJOB_PROXY_URL_PREFIX = "https://github.com/github/codeql-action/releases/download/codeql-bundle-v2.21.1/";
const UPDATEJOB_PROXY_VERSION = "v2.0.20241023203727";
const UPDATEJOB_PROXY_URL_PREFIX = "https://github.com/github/codeql-action/releases/download/codeql-bundle-v2.18.1/";
const KEY_SIZE = 2048;
const KEY_EXPIRY_YEARS = 2;
const CERT_SUBJECT = [
@@ -112,8 +112,7 @@ async function runWrapper() {
ca,
};
// Start the Proxy
const proxyBin = actionsUtil.getOptionalInput("proxy_binary") ??
(await getProxyBinaryPath());
const proxyBin = await getProxyBinaryPath();
await startProxy(proxyBin, proxyConfig, proxyLogFilePath, logger);
}
async function startProxy(binPath, config, logFilePath, logger) {

File diff suppressed because one or more lines are too long
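The start-proxy hunk above also differs in how the proxy binary is located: one side honours an optional proxy_binary input before falling back to a download. A sketch of that fallback pattern, with stand-in helpers rather than the real actionsUtil/getProxyBinaryPath functions:

async function resolveProxyBinary(
  getOptionalInput: (name: string) => string | undefined,
  downloadProxyBinary: () => Promise<string>,
): Promise<string> {
  // `??` falls through to the download only when the input was not supplied at all.
  return getOptionalInput("proxy_binary") ?? (await downloadProxyBinary());
}

// Stand-in helpers: no input supplied, so the downloaded path wins.
resolveProxyBinary(
  () => undefined,
  async () => "/tmp/update-job-proxy",
).then((binPath) => console.log(binPath)); // /tmp/update-job-proxy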

15
lib/status-report.js generated
View File

@@ -35,7 +35,6 @@ var __importStar = (this && this.__importStar) || (function () {
Object.defineProperty(exports, "__esModule", { value: true });
exports.JobStatus = exports.ActionName = void 0;
exports.isFirstPartyAnalysis = isFirstPartyAnalysis;
exports.isThirdPartyAnalysis = isThirdPartyAnalysis;
exports.getActionsStatus = getActionsStatus;
exports.getJobStatusDisplayName = getJobStatusDisplayName;
exports.createStatusReportBase = createStatusReportBase;
@@ -47,7 +46,6 @@ const api_client_1 = require("./api-client");
const doc_url_1 = require("./doc-url");
const environment_1 = require("./environment");
const git_utils_1 = require("./git-utils");
const repository_1 = require("./repository");
const util_1 = require("./util");
var ActionName;
(function (ActionName) {
@@ -72,12 +70,6 @@ function isFirstPartyAnalysis(actionName) {
}
return process.env[environment_1.EnvVar.INIT_ACTION_HAS_RUN] === "true";
}
/**
* @returns true if the analysis is considered to be third party.
*/
function isThirdPartyAnalysis(actionName) {
return !isFirstPartyAnalysis(actionName);
}
/** Overall status of the entire job. String values match the Hydro schema. */
var JobStatus;
(function (JobStatus) {
@@ -256,12 +248,13 @@ async function sendStatusReport(statusReport) {
core.debug("In test mode. Status reports are not uploaded.");
return;
}
const nwo = (0, repository_1.getRepositoryNwo)();
const nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const client = (0, api_client_1.getApiClient)();
try {
await client.request("PUT /repos/:owner/:repo/code-scanning/analysis/status", {
owner: nwo.owner,
repo: nwo.repo,
owner,
repo,
data: statusReportJSON,
});
}

File diff suppressed because one or more lines are too long

View File

@@ -109,14 +109,4 @@ function setupEnvironmentAndStub(tmpDir) {
t.is((await (0, status_report_1.createStatusReportBase)(status_report_1.ActionName.Analyze, "failure", new Date("May 19, 2023 05:19:00"), (0, testing_utils_1.createTestConfig)({}), { numAvailableBytes: 100, numTotalBytes: 500 }, (0, logging_1.getRunnerLogger)(false), "failure cause", "exception stack trace"))?.first_party_analysis, true);
});
});
(0, ava_1.default)("getActionStatus handling correctly various types of errors", (t) => {
t.is((0, status_report_1.getActionsStatus)(new Error("arbitrary error")), "failure", "We categorise an arbitrary error as a failure");
t.is((0, status_report_1.getActionsStatus)(new util_1.ConfigurationError("arbitrary error")), "user-error", "We categorise a ConfigurationError as a user error");
t.is((0, status_report_1.getActionsStatus)(new Error("exit code 1"), "multiple things went wrong"), "failure", "getActionsStatus should return failure if passed an arbitrary error and an additional failure cause");
t.is((0, status_report_1.getActionsStatus)(new util_1.ConfigurationError("exit code 1"), "multiple things went wrong"), "user-error", "getActionsStatus should return user-error if passed a configuration error and an additional failure cause");
t.is((0, status_report_1.getActionsStatus)(), "success", "getActionsStatus should return success if no error is passed");
t.is((0, status_report_1.getActionsStatus)(new Object()), "failure", "getActionsStatus should return failure if passed an arbitrary object");
t.is((0, status_report_1.getActionsStatus)(null, "an error occurred"), "failure", "getActionsStatus should return failure if passed null and an additional failure cause");
t.is((0, status_report_1.getActionsStatus)((0, util_1.wrapError)(new util_1.ConfigurationError("arbitrary error"))), "user-error", "We still recognise a wrapped ConfigurationError as a user error");
});
//# sourceMappingURL=status-report.test.js.map

File diff suppressed because one or more lines are too long

11
lib/tar.js generated
View File

@@ -143,16 +143,7 @@ async function extractTarZst(tar, dest, tarVersion, logger) {
: ""}`);
try {
// Initialize args
//
// `--ignore-zeros` means that trailing zero bytes at the end of an archive will be read
// by `tar` in case a further concatenated archive follows. Otherwise when a tarball built
// by GNU tar, which writes many trailing zeroes, is read by BSD tar, which expects less, then
// BSD tar can hang up the pipe to its filter program early, and if that program is `zstd`
// then it will try to write the remaining zeroes, get an EPIPE error because `tar` has closed
// its end of the pipe, return 1, and `tar` will pass the error along.
//
// See also https://github.com/facebook/zstd/issues/4294
const args = ["-x", "--zstd", "--ignore-zeros"];
const args = ["-x", "--zstd"];
if (tarVersion.type === "gnu") {
// Suppress warnings when using GNU tar to extract archives created by BSD tar
args.push("--warning=no-unknown-keyword");

File diff suppressed because one or more lines are too long
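The tar.js hunk above explains why `--ignore-zeros` matters when BSD tar reads a GNU-produced archive through zstd. A minimal sketch of how the flag set is assembled; the TarVersion shape is assumed, and only the flags visible in the hunk are included:

interface TarVersion {
  type: "gnu" | "bsd";
  version: string;
}

function buildExtractFlags(tarVersion: TarVersion): string[] {
  // `--ignore-zeros` lets tar read past trailing zero blocks instead of closing the pipe to
  // zstd early, which would otherwise surface as an EPIPE failure (see the comment above).
  const args = ["-x", "--zstd", "--ignore-zeros"];
  if (tarVersion.type === "gnu") {
    // Suppress warnings when GNU tar extracts archives created by BSD tar.
    args.push("--warning=no-unknown-keyword");
  }
  return args;
}

console.log(buildExtractFlags({ type: "gnu", version: "1.34" }));
// [ '-x', '--zstd', '--ignore-zeros', '--warning=no-unknown-keyword' ]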

66
lib/upload-lib.js generated
View File

@@ -44,8 +44,6 @@ exports.validateSarifFileSchema = validateSarifFileSchema;
exports.buildPayload = buildPayload;
exports.uploadFiles = uploadFiles;
exports.waitForProcessing = waitForProcessing;
exports.shouldConsiderConfigurationError = shouldConsiderConfigurationError;
exports.shouldConsiderInvalidRequest = shouldConsiderInvalidRequest;
exports.validateUniqueCategory = validateUniqueCategory;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
@@ -60,7 +58,6 @@ const api = __importStar(require("./api-client"));
const api_client_1 = require("./api-client");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const diff_informed_analysis_utils_1 = require("./diff-informed-analysis-utils");
const environment_1 = require("./environment");
const fingerprints = __importStar(require("./fingerprints"));
const gitUtils = __importStar(require("./git-utils"));
@@ -410,20 +407,11 @@ async function uploadFiles(sarifPath, checkoutPath, category, features, logger)
logger.startGroup("Uploading results");
logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
try {
// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
validateSarifFileSchema(file, logger);
}
}
catch (e) {
if (e instanceof SyntaxError) {
throw new InvalidSarifUploadError(e.message);
}
throw e;
// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
validateSarifFileSchema(file, logger);
}
let sarif = await combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger);
sarif = filterAlertsByDiffRange(logger, sarif);
sarif = await fingerprints.addFingerprints(sarif, checkoutPath, logger);
const analysisKey = await api.getAnalysisKey();
const environment = actionsUtil.getRequiredInput("matrix");
@@ -445,7 +433,7 @@ async function uploadFiles(sarifPath, checkoutPath, category, features, logger)
const numResultInSarif = countResultsInSarif(sarifPayload);
logger.debug(`Number of results in upload: ${numResultInSarif}`);
// Make the upload
const sarifID = await uploadPayload(payload, (0, repository_1.getRepositoryNwo)(), logger);
const sarifID = await uploadPayload(payload, (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), logger);
logger.endGroup();
return {
statusReport: {
@@ -534,12 +522,9 @@ async function waitForProcessing(repositoryNwo, sarifID, logger, options = {
* Returns whether the provided processing errors are a configuration error.
*/
function shouldConsiderConfigurationError(processingErrors) {
const expectedConfigErrors = [
"CodeQL analyses from advanced configurations cannot be processed when the default setup is enabled",
"rejecting delivery as the repository has too many logical alerts",
];
return (processingErrors.length === 1 &&
expectedConfigErrors.some((msg) => processingErrors[0].includes(msg)));
processingErrors[0] ===
"CodeQL analyses from advanced configurations cannot be processed when the default setup is enabled");
}
/**
* Returns whether the provided processing errors are the result of an invalid SARIF upload request.
@@ -622,43 +607,4 @@ class InvalidSarifUploadError extends Error {
}
}
exports.InvalidSarifUploadError = InvalidSarifUploadError;
function filterAlertsByDiffRange(logger, sarif) {
const diffRanges = (0, diff_informed_analysis_utils_1.readDiffRangesJsonFile)(logger);
if (!diffRanges?.length) {
return sarif;
}
const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
for (const run of sarif.runs) {
if (run.results) {
run.results = run.results.filter((result) => {
const locations = [
...(result.locations || []).map((loc) => loc.physicalLocation),
...(result.relatedLocations || []).map((loc) => loc.physicalLocation),
];
return locations.some((physicalLocation) => {
const locationUri = physicalLocation?.artifactLocation?.uri;
const locationStartLine = physicalLocation?.region?.startLine;
if (!locationUri || locationStartLine === undefined) {
return false;
}
// CodeQL always uses forward slashes as the path separator, so on Windows we
// need to replace any backslashes with forward slashes.
const locationPath = path
.join(checkoutPath, locationUri)
.replaceAll(path.sep, "/");
// Alert filtering here replicates the same behavior as the restrictAlertsTo
// extensible predicate in CodeQL. See the restrictAlertsTo documentation
// https://codeql.github.com/codeql-standard-libraries/csharp/codeql/util/AlertFiltering.qll/predicate.AlertFiltering$restrictAlertsTo.3.html
// for more details, such as why the filtering applies only to the first line
// of an alert location.
return diffRanges.some((range) => range.path === locationPath &&
((range.startLine <= locationStartLine &&
range.endLine >= locationStartLine) ||
(range.startLine === 0 && range.endLine === 0)));
});
});
}
}
return sarif;
}
//# sourceMappingURL=upload-lib.js.map
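The removed filterAlertsByDiffRange helper above narrows alerts down to the diff ranges of a pull request. The per-location test at its core can be sketched on its own; the DiffRange shape mirrors what the code above reads from the diff-ranges JSON file:

interface DiffRange {
  path: string;
  startLine: number;
  endLine: number;
}

function locationInDiffRanges(
  locationPath: string,
  locationStartLine: number,
  diffRanges: DiffRange[],
): boolean {
  // Only the first line of an alert location is checked, and a (0, 0) range means
  // "any line in this file", mirroring CodeQL's restrictAlertsTo predicate.
  return diffRanges.some(
    (range) =>
      range.path === locationPath &&
      ((range.startLine <= locationStartLine && range.endLine >= locationStartLine) ||
        (range.startLine === 0 && range.endLine === 0)),
  );
}

console.log(locationInDiffRanges("src/app.ts", 42, [{ path: "src/app.ts", startLine: 40, endLine: 50 }])); // true
console.log(locationInDiffRanges("src/app.ts", 42, [{ path: "src/other.ts", startLine: 0, endLine: 0 }])); // false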

File diff suppressed because one or more lines are too long

35
lib/upload-lib.test.js generated
View File

@@ -244,41 +244,6 @@ ava_1.default.beforeEach(() => {
type: util_1.GitHubVariant.DOTCOM,
}));
});
(0, ava_1.default)("shouldConsiderConfigurationError correctly detects configuration errors", (t) => {
const error1 = [
"CodeQL analyses from advanced configurations cannot be processed when the default setup is enabled",
];
t.true(uploadLib.shouldConsiderConfigurationError(error1));
const error2 = [
"rejecting delivery as the repository has too many logical alerts",
];
t.true(uploadLib.shouldConsiderConfigurationError(error2));
// We fail cases where we get > 1 error messages back
const error3 = [
"rejecting delivery as the repository has too many alerts",
"extra error message",
];
t.false(uploadLib.shouldConsiderConfigurationError(error3));
});
(0, ava_1.default)("shouldConsiderInvalidRequest returns correct recognises processing errors", (t) => {
const error1 = [
"rejecting SARIF",
"an invalid URI was provided as a SARIF location",
];
t.true(uploadLib.shouldConsiderInvalidRequest(error1));
const error2 = [
"locationFromSarifResult: expected artifact location",
"an invalid URI was provided as a SARIF location",
];
t.true(uploadLib.shouldConsiderInvalidRequest(error2));
// We expect ALL errors to be of processing errors, for the outcome to be classified as
// an invalid SARIF upload error.
const error3 = [
"could not convert rules: invalid security severity value, is not a number",
"an unknown error occurred",
];
t.false(uploadLib.shouldConsiderInvalidRequest(error3));
});
function createMockSarif(id, tool) {
return {
runs: [

File diff suppressed because one or more lines are too long

View File

@@ -61,7 +61,7 @@ async function run() {
(0, util_1.checkActionVersion)((0, actions_util_1.getActionVersion)(), gitHubVersion);
// Make inputs accessible in the `post` step.
actionsUtil.persistInputs();
const repositoryNwo = (0, repository_1.getRepositoryNwo)();
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
const startingStatusReportBase = await (0, status_report_1.createStatusReportBase)(status_report_1.ActionName.UploadSarif, "starting", startedAt, undefined, await (0, util_1.checkDiskUsage)(logger), logger);
if (startingStatusReportBase !== undefined) {
@@ -75,12 +75,12 @@ async function run() {
core.debug("In test mode. Waiting for processing is disabled.");
}
else if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.getRepositoryNwo)(), uploadResult.sarifID, logger);
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), uploadResult.sarifID, logger);
}
await sendSuccessStatusReport(startedAt, uploadResult.statusReport, logger);
}
catch (unwrappedError) {
const error = (0, status_report_1.isThirdPartyAnalysis)(status_report_1.ActionName.UploadSarif) &&
const error = !(0, status_report_1.isFirstPartyAnalysis)(status_report_1.ActionName.UploadSarif) &&
unwrappedError instanceof upload_lib.InvalidSarifUploadError
? new util_1.ConfigurationError(unwrappedError.message)
: (0, util_1.wrapError)(unwrappedError);

View File

@@ -1 +1 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAAyE;AACzE,6CAAgD;AAChD,mDAA2C;AAC3C,uCAAqD;AACrD,6CAAgD;AAChD,mDAOyB;AACzB,yDAA2C;AAC3C,iCAQgB;AAMhB,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C,EAC1C,MAAc;IAEd,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,WAAW,EACtB,SAAS,EACT,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;QACnC,MAAM,YAAY,GAA4B;YAC5C,GAAG,gBAAgB;YACnB,GAAG,WAAW;SACf,CAAC;QACF,MAAM,IAAA,gCAAgB,EAAC,YAAY,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAE1C,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;IAC/C,IAAA,yBAAkB,EAAC,IAAA,+BAAgB,GAAE,EAAE,aAAa,CAAC,CAAC;IAEtD,6CAA6C;IAC7C,WAAW,CAAC,aAAa,EAAE,CAAC;IAE5B,MAAM,aAAa,GAAG,IAAA,6BAAgB,GAAE,CAAC;IACzC,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;IAEF,MAAM,wBAAwB,GAAG,MAAM,IAAA,sCAAsB,EAC3D,0BAAU,CAAC,WAAW,EACtB,UAAU,EACV,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,wBAAwB,KAAK,SAAS,EAAE,CAAC;QAC3C,MAAM,IAAA,gCAAgB,EAAC,wBAAwB,CAAC,CAAC;IACnD,CAAC;IAED,IAAI,CAAC;QACH,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,WAAW,CAC/C,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,UAAU,CAAC,EACxC,QAAQ,EACR,MAAM,CACP,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QAEjD,qEAAqE;QACrE,IAAI,IAAA,mBAAY,GAAE,EAAE,CAAC;YACnB,IAAI,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;QAClE,CAAC;aAAM,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE,CAAC;YAC1E,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,6BAAgB,GAAE,EAClB,YAAY,CAAC,OAAO,EACpB,MAAM,CACP,CAAC;QACJ,CAAC;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC;IAC9E,CAAC;IAAC,OAAO,cAAc,EAAE,CAAC;QACxB,MAAM,KAAK,GACT,IAAA,oCAAoB,EAAC,0BAAU,CAAC,WAAW,CAAC;YAC5C,cAAc,YAAY,UAAU,CAAC,uBAAuB;YAC1D,CAAC,CAAC,IAAI,yBAAkB,CAAC,cAAc,CAAC,OAAO,CAAC;YAChD,CAAC,CAAC,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QAChC,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;QAC9B,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QAExB,MAAM,qBAAqB,GAAG,MAAM,IAAA,sCAAsB,EACxD,0BAAU,CAAC,WAAW,EACtB,IAAA,gCAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,EACN,OAAO,EACP,KAAK,CAAC,KAAK,CACZ,CAAC;QACF,IAAI,qBAAqB,KAAK,SAAS,EAAE,CAAC;YACxC,MAAM,IAAA,gCAAgB,EAAC,qBAAqB,CAAC,CAAC;QAChD,CAAC;QACD,OAAO;IACT,CAAC;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,GAAG,EAAE,CAAC;IACd,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,sCAAsC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAC/D,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAAyE;AACzE,6CAAgD;AAChD,mDAA2C;AAC3C,uCAAqD;AACrD,6CAAkD;AAClD,mDAOyB;AACzB,yDAA2C;AAC3C,iCASgB;AAMhB,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C,EAC1C,MAAc;IAEd,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,WAAW,EACtB,SAAS,EACT,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;QACnC,MAAM,YAAY,GAA4B;YAC5C,GAAG,gBAAgB;YACnB,GAAG,WAAW;SACf,CAAC;QACF,MAAM,IAAA,gCAAgB,EAAC,YAAY,CAAC,CAAC;IACvC,CAAC;AACH,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAE1C,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;IAC/C,IAAA,yBAAkB,EAAC,IAAA,+BAAgB,GAAE,EAAE,aAAa,CAAC,CAAC;IAEtD,6CAA6C;IAC7C,WAAW,CAAC,aAAa,EAAE,CAAC;IAE5B,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;IACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;IAEF,MAAM,wBAAwB,GAAG,MAAM,IAAA,sCAAsB,EAC3D,0BAAU,CAAC,WAAW,EACtB,UAAU,EACV,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,wBAAwB,KAAK,SAAS,EAAE,CAAC;QAC3C,MAAM,IAAA,gCAAgB,EAAC,wBAAwB,CAAC,CAAC;IACnD,CAAC;IAED,IAAI,CAAC;QACH,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,WAAW,CAC/C,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,UAAU,CAAC,EACxC,QAAQ,EACR,MAAM,CACP,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QAEjD,qEAAqE;QACrE,IAAI,IAAA,mBAAY,GAAE,EAAE,CAAC;YACnB,IAAI,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;QAClE,CAAC;aAAM,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE,CAAC;YAC1E,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,MAAM,CACP,CAAC;QACJ,CAAC;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC;IAC9E,CAAC;IAAC,OAAO,cAAc,EAAE,CAAC;QACxB,MAAM,KAAK,GACT,CAAC,IAAA,oCAAoB,EAAC,0BAAU,CAAC,WAAW,CAAC;YAC7C,cAAc,YAAY,UAAU,CAAC,uBAAuB;YAC1D,CAAC,CAAC,IAAI,yBAAkB,CAAC,cAAc,CAAC,OAAO,CAAC;YAChD,CAAC,CAAC,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QAChC,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;QAC9B,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QAExB,MAAM,qBAAqB,GAAG,MAAM,IAAA,sCAAsB,EACxD,0BAAU,CAAC,WAAW,EACtB,IAAA,gCAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,EACN,OAAO,EACP,KAAK,CAAC,KAAK,CACZ,CAAC;QACF,IAAI,qBAAqB,KAAK,SAAS,EAAE,CAAC;YACxC,MAAM,IAAA,gCAAgB,EAAC,qBAAqB,CAAC,CAAC;QAChD,CAAC;QACD,OAAO;IACT,CAAC;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,GAAG,EAAE,CAAC;IACd,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,sCAAsC,IAAA,sBAAe,EAAC,KAAK,CAAC,EAAE,CAC/D,CAAC;IACJ,CAAC;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

3
lib/util.js generated
View File

@@ -823,11 +823,10 @@ async function checkDiskUsage(logger) {
return undefined;
}
const diskUsage = await (0, check_disk_space_1.default)(getRequiredEnvParam("GITHUB_WORKSPACE"));
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const message = "The Actions runner is running low on disk space " +
`(${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
`(${(diskUsage.free / gbInBytes).toPrecision(4)} GB available).`;
if (process.env[environment_1.EnvVar.HAS_WARNED_ABOUT_DISK_SPACE] !== "true") {
logger.warning(message);
}

File diff suppressed because one or more lines are too long
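One side of the util.js hunk above reports the runner's free disk space in GB against a 2 GB warning threshold. A self-contained sketch of that check; the logger shape is an assumption:

function warnIfLowDiskSpace(freeBytes: number, logger: { warning(message: string): void }): void {
  const gbInBytes = 1024 * 1024 * 1024;
  if (freeBytes < 2 * gbInBytes) {
    logger.warning(
      "The Actions runner is running low on disk space " +
        `(${(freeBytes / gbInBytes).toPrecision(4)} GB available).`,
    );
  }
}

warnIfLowDiskSpace(1.5 * 1024 * 1024 * 1024, { warning: (message) => console.log(message) });
// The Actions runner is running low on disk space (1.500 GB available).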

1
node_modules/.bin/dot-object generated vendored Symbolic link
View File

@@ -0,0 +1 @@
../dot-object/bin/dot-object

1
node_modules/.bin/protoc-gen-twirp_ts generated vendored Symbolic link
View File

@@ -0,0 +1 @@
../twirp-ts/protoc-gen-twirp_ts

1
node_modules/.bin/tldts generated vendored
View File

@@ -1 +0,0 @@
../tldts/bin/cli.js

2545
node_modules/.package-lock.json generated vendored

File diff suppressed because it is too large

View File

@@ -1,4 +1,4 @@
export * from './google/protobuf/timestamp';
export * from './google/protobuf/wrappers';
export * from './results/api/v1/artifact';
export * from './results/api/v1/artifact.twirp-client';
export * from './results/api/v1/artifact.twirp';

View File

@@ -17,5 +17,5 @@ Object.defineProperty(exports, "__esModule", { value: true });
__exportStar(require("./google/protobuf/timestamp"), exports);
__exportStar(require("./google/protobuf/wrappers"), exports);
__exportStar(require("./results/api/v1/artifact"), exports);
__exportStar(require("./results/api/v1/artifact.twirp-client"), exports);
__exportStar(require("./results/api/v1/artifact.twirp"), exports);
//# sourceMappingURL=index.js.map

View File

@@ -1 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/generated/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,8DAA2C;AAC3C,6DAA0C;AAC1C,4DAAyC;AACzC,yEAAsD"}
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/generated/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,8DAA2C;AAC3C,6DAA0C;AAC1C,4DAAyC;AACzC,kEAA+C"}

View File

@@ -8,66 +8,6 @@ import { MessageType } from "@protobuf-ts/runtime";
import { Int64Value } from "../../../google/protobuf/wrappers";
import { StringValue } from "../../../google/protobuf/wrappers";
import { Timestamp } from "../../../google/protobuf/timestamp";
/**
* @generated from protobuf message github.actions.results.api.v1.MigrateArtifactRequest
*/
export interface MigrateArtifactRequest {
/**
* @generated from protobuf field: string workflow_run_backend_id = 1;
*/
workflowRunBackendId: string;
/**
* @generated from protobuf field: string name = 2;
*/
name: string;
/**
* @generated from protobuf field: google.protobuf.Timestamp expires_at = 3;
*/
expiresAt?: Timestamp;
}
/**
* @generated from protobuf message github.actions.results.api.v1.MigrateArtifactResponse
*/
export interface MigrateArtifactResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* @generated from protobuf field: string signed_upload_url = 2;
*/
signedUploadUrl: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
*/
export interface FinalizeMigratedArtifactRequest {
/**
* @generated from protobuf field: string workflow_run_backend_id = 1;
*/
workflowRunBackendId: string;
/**
* @generated from protobuf field: string name = 2;
*/
name: string;
/**
* @generated from protobuf field: int64 size = 3;
*/
size: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
*/
export interface FinalizeMigratedArtifactResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* @generated from protobuf field: int64 artifact_id = 2;
*/
artifactId: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.CreateArtifactRequest
*/
@@ -222,12 +162,6 @@ export interface ListArtifactsResponse_MonolithArtifact {
* @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
*/
createdAt?: Timestamp;
/**
* The SHA-256 digest of the artifact, calculated on upload for upload-artifact v4 & newer
*
* @generated from protobuf field: google.protobuf.StringValue digest = 7;
*/
digest?: StringValue;
}
/**
* @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
@@ -285,46 +219,6 @@ export interface DeleteArtifactResponse {
*/
artifactId: string;
}
declare class MigrateArtifactRequest$Type extends MessageType<MigrateArtifactRequest> {
constructor();
create(value?: PartialMessage<MigrateArtifactRequest>): MigrateArtifactRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MigrateArtifactRequest): MigrateArtifactRequest;
internalBinaryWrite(message: MigrateArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactRequest
*/
export declare const MigrateArtifactRequest: MigrateArtifactRequest$Type;
declare class MigrateArtifactResponse$Type extends MessageType<MigrateArtifactResponse> {
constructor();
create(value?: PartialMessage<MigrateArtifactResponse>): MigrateArtifactResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MigrateArtifactResponse): MigrateArtifactResponse;
internalBinaryWrite(message: MigrateArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactResponse
*/
export declare const MigrateArtifactResponse: MigrateArtifactResponse$Type;
declare class FinalizeMigratedArtifactRequest$Type extends MessageType<FinalizeMigratedArtifactRequest> {
constructor();
create(value?: PartialMessage<FinalizeMigratedArtifactRequest>): FinalizeMigratedArtifactRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeMigratedArtifactRequest): FinalizeMigratedArtifactRequest;
internalBinaryWrite(message: FinalizeMigratedArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
*/
export declare const FinalizeMigratedArtifactRequest: FinalizeMigratedArtifactRequest$Type;
declare class FinalizeMigratedArtifactResponse$Type extends MessageType<FinalizeMigratedArtifactResponse> {
constructor();
create(value?: PartialMessage<FinalizeMigratedArtifactResponse>): FinalizeMigratedArtifactResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeMigratedArtifactResponse): FinalizeMigratedArtifactResponse;
internalBinaryWrite(message: FinalizeMigratedArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
*/
export declare const FinalizeMigratedArtifactResponse: FinalizeMigratedArtifactResponse$Type;
declare class CreateArtifactRequest$Type extends MessageType<CreateArtifactRequest> {
constructor();
create(value?: PartialMessage<CreateArtifactRequest>): CreateArtifactRequest;

View File

@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifactRequest = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = exports.FinalizeMigratedArtifactResponse = exports.FinalizeMigratedArtifactRequest = exports.MigrateArtifactResponse = exports.MigrateArtifactRequest = void 0;
exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifactRequest = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = void 0;
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "results/api/v1/artifact.proto" (package "github.actions.results.api.v1", syntax proto3)
// tslint:disable
@@ -14,236 +14,6 @@ const wrappers_1 = require("../../../google/protobuf/wrappers");
const wrappers_2 = require("../../../google/protobuf/wrappers");
const timestamp_1 = require("../../../google/protobuf/timestamp");
// @generated message type with reflection information, may provide speed optimized methods
class MigrateArtifactRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.MigrateArtifactRequest", [
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
]);
}
create(value) {
const message = { workflowRunBackendId: "", name: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string workflow_run_backend_id */ 1:
message.workflowRunBackendId = reader.string();
break;
case /* string name */ 2:
message.name = reader.string();
break;
case /* google.protobuf.Timestamp expires_at */ 3:
message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* string workflow_run_backend_id = 1; */
if (message.workflowRunBackendId !== "")
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
/* string name = 2; */
if (message.name !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.name);
/* google.protobuf.Timestamp expires_at = 3; */
if (message.expiresAt)
timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(3, runtime_1.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactRequest
*/
exports.MigrateArtifactRequest = new MigrateArtifactRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class MigrateArtifactResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.MigrateArtifactResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { ok: false, signedUploadUrl: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* string signed_upload_url */ 2:
message.signedUploadUrl = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
/* string signed_upload_url = 2; */
if (message.signedUploadUrl !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactResponse
*/
exports.MigrateArtifactResponse = new MigrateArtifactResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeMigratedArtifactRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.FinalizeMigratedArtifactRequest", [
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value) {
const message = { workflowRunBackendId: "", name: "", size: "0" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string workflow_run_backend_id */ 1:
message.workflowRunBackendId = reader.string();
break;
case /* string name */ 2:
message.name = reader.string();
break;
case /* int64 size */ 3:
message.size = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* string workflow_run_backend_id = 1; */
if (message.workflowRunBackendId !== "")
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
/* string name = 2; */
if (message.name !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.name);
/* int64 size = 3; */
if (message.size !== "0")
writer.tag(3, runtime_1.WireType.Varint).int64(message.size);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
*/
exports.FinalizeMigratedArtifactRequest = new FinalizeMigratedArtifactRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeMigratedArtifactResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.FinalizeMigratedArtifactResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value) {
const message = { ok: false, artifactId: "0" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* int64 artifact_id */ 2:
message.artifactId = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
/* int64 artifact_id = 2; */
if (message.artifactId !== "0")
writer.tag(2, runtime_1.WireType.Varint).int64(message.artifactId);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
*/
exports.FinalizeMigratedArtifactResponse = new FinalizeMigratedArtifactResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class CreateArtifactRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.CreateArtifactRequest", [
@@ -625,8 +395,7 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
{ no: 3, name: "database_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
{ no: 7, name: "digest", kind: "message", T: () => wrappers_2.StringValue }
{ no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp }
]);
}
create(value) {
@@ -659,9 +428,6 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
case /* google.protobuf.Timestamp created_at */ 6:
message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
break;
case /* google.protobuf.StringValue digest */ 7:
message.digest = wrappers_2.StringValue.internalBinaryRead(reader, reader.uint32(), options, message.digest);
break;
default:
let u = options.readUnknownField;
if (u === "throw")
@@ -692,9 +458,6 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
/* google.protobuf.Timestamp created_at = 6; */
if (message.createdAt)
timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* google.protobuf.StringValue digest = 7; */
if (message.digest)
wrappers_2.StringValue.internalBinaryWrite(message.digest, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -936,8 +699,6 @@ exports.ArtifactService = new runtime_rpc_1.ServiceType("github.actions.results.
{ name: "FinalizeArtifact", options: {}, I: exports.FinalizeArtifactRequest, O: exports.FinalizeArtifactResponse },
{ name: "ListArtifacts", options: {}, I: exports.ListArtifactsRequest, O: exports.ListArtifactsResponse },
{ name: "GetSignedArtifactURL", options: {}, I: exports.GetSignedArtifactURLRequest, O: exports.GetSignedArtifactURLResponse },
{ name: "DeleteArtifact", options: {}, I: exports.DeleteArtifactRequest, O: exports.DeleteArtifactResponse },
{ name: "MigrateArtifact", options: {}, I: exports.MigrateArtifactRequest, O: exports.MigrateArtifactResponse },
{ name: "FinalizeMigratedArtifact", options: {}, I: exports.FinalizeMigratedArtifactRequest, O: exports.FinalizeMigratedArtifactResponse }
{ name: "DeleteArtifact", options: {}, I: exports.DeleteArtifactRequest, O: exports.DeleteArtifactResponse }
]);
//# sourceMappingURL=artifact.js.map
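For orientation, the $Type classes in the generated file above all follow the @protobuf-ts/runtime MessageType pattern: create() fills in the field defaults, and the toBinary()/fromBinary() helpers inherited from MessageType drive the internalBinaryWrite/internalBinaryRead methods shown in this diff. The following round-trip sketch is not part of the diff; the import path is assumed for illustration and the message type is one of the singletons exported above.

// Sketch only: the import path is assumed; these singletons are exported by
// the generated artifact module shown above.
import { FinalizeMigratedArtifactRequest } from "./generated/results/api/v1/artifact";

// create() applies the defaults visible in the generated create() method
// (empty strings, "0" for int64 fields) before merging the partial value.
const request = FinalizeMigratedArtifactRequest.create({
    workflowRunBackendId: "run-backend-id", // placeholder value
    name: "my-artifact",                    // placeholder value
    size: "1024",                           // int64 fields are carried as strings by this codegen
});

// toBinary()/fromBinary() delegate to internalBinaryWrite/internalBinaryRead.
const bytes: Uint8Array = FinalizeMigratedArtifactRequest.toBinary(request);
const parsed = FinalizeMigratedArtifactRequest.fromBinary(bytes);
console.log(parsed.name, parsed.size); // "my-artifact" "1024"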

File diff suppressed because one or more lines are too long


@@ -1,30 +0,0 @@
import { CreateArtifactRequest, CreateArtifactResponse, FinalizeArtifactRequest, FinalizeArtifactResponse, ListArtifactsRequest, ListArtifactsResponse, GetSignedArtifactURLRequest, GetSignedArtifactURLResponse, DeleteArtifactRequest, DeleteArtifactResponse } from "./artifact";
interface Rpc {
request(service: string, method: string, contentType: "application/json" | "application/protobuf", data: object | Uint8Array): Promise<object | Uint8Array>;
}
export interface ArtifactServiceClient {
CreateArtifact(request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
FinalizeArtifact(request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
GetSignedArtifactURL(request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
DeleteArtifact(request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
}
export declare class ArtifactServiceClientJSON implements ArtifactServiceClient {
private readonly rpc;
constructor(rpc: Rpc);
CreateArtifact(request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
FinalizeArtifact(request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
GetSignedArtifactURL(request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
DeleteArtifact(request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
}
export declare class ArtifactServiceClientProtobuf implements ArtifactServiceClient {
private readonly rpc;
constructor(rpc: Rpc);
CreateArtifact(request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
FinalizeArtifact(request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
GetSignedArtifactURL(request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
DeleteArtifact(request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
}
export {};
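The declaration file above pins down the transport seam for both clients: every RPC funnels through a single Rpc.request(service, method, contentType, data) call, and the clients themselves only handle serialization. As a rough illustration, an Rpc could be implemented over fetch as sketched below; the Twirp-style URL layout and the bearer-token header are assumptions made for the sketch, not something defined by the generated code.

// Sketch: a fetch-based transport matching the Rpc interface declared above.
// The /twirp/<service>/<method> URL shape and the auth header are assumptions.
interface Rpc {
    request(
        service: string,
        method: string,
        contentType: "application/json" | "application/protobuf",
        data: object | Uint8Array
    ): Promise<object | Uint8Array>;
}

class FetchRpc implements Rpc {
    constructor(private readonly baseUrl: string, private readonly token: string) {}

    async request(
        service: string,
        method: string,
        contentType: "application/json" | "application/protobuf",
        data: object | Uint8Array
    ): Promise<object | Uint8Array> {
        // JSON requests arrive as plain objects, protobuf requests as bytes.
        const body = contentType === "application/json" ? JSON.stringify(data) : (data as Uint8Array);
        const response = await fetch(`${this.baseUrl}/twirp/${service}/${method}`, {
            method: "POST",
            headers: { "Content-Type": contentType, "Authorization": `Bearer ${this.token}` },
            body,
        });
        if (!response.ok) {
            throw new Error(`${service}/${method} failed with status ${response.status}`);
        }
        return contentType === "application/json"
            ? ((await response.json()) as object)
            : new Uint8Array(await response.arrayBuffer());
    }
}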


@@ -1,100 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ArtifactServiceClientProtobuf = exports.ArtifactServiceClientJSON = void 0;
const artifact_1 = require("./artifact");
class ArtifactServiceClientJSON {
constructor(rpc) {
this.rpc = rpc;
this.CreateArtifact.bind(this);
this.FinalizeArtifact.bind(this);
this.ListArtifacts.bind(this);
this.GetSignedArtifactURL.bind(this);
this.DeleteArtifact.bind(this);
}
CreateArtifact(request) {
const data = artifact_1.CreateArtifactRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "CreateArtifact", "application/json", data);
return promise.then((data) => artifact_1.CreateArtifactResponse.fromJson(data, {
ignoreUnknownFields: true,
}));
}
FinalizeArtifact(request) {
const data = artifact_1.FinalizeArtifactRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "FinalizeArtifact", "application/json", data);
return promise.then((data) => artifact_1.FinalizeArtifactResponse.fromJson(data, {
ignoreUnknownFields: true,
}));
}
ListArtifacts(request) {
const data = artifact_1.ListArtifactsRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "ListArtifacts", "application/json", data);
return promise.then((data) => artifact_1.ListArtifactsResponse.fromJson(data, { ignoreUnknownFields: true }));
}
GetSignedArtifactURL(request) {
const data = artifact_1.GetSignedArtifactURLRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "GetSignedArtifactURL", "application/json", data);
return promise.then((data) => artifact_1.GetSignedArtifactURLResponse.fromJson(data, {
ignoreUnknownFields: true,
}));
}
DeleteArtifact(request) {
const data = artifact_1.DeleteArtifactRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "DeleteArtifact", "application/json", data);
return promise.then((data) => artifact_1.DeleteArtifactResponse.fromJson(data, {
ignoreUnknownFields: true,
}));
}
}
exports.ArtifactServiceClientJSON = ArtifactServiceClientJSON;
class ArtifactServiceClientProtobuf {
constructor(rpc) {
this.rpc = rpc;
this.CreateArtifact.bind(this);
this.FinalizeArtifact.bind(this);
this.ListArtifacts.bind(this);
this.GetSignedArtifactURL.bind(this);
this.DeleteArtifact.bind(this);
}
CreateArtifact(request) {
const data = artifact_1.CreateArtifactRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "CreateArtifact", "application/protobuf", data);
return promise.then((data) => artifact_1.CreateArtifactResponse.fromBinary(data));
}
FinalizeArtifact(request) {
const data = artifact_1.FinalizeArtifactRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "FinalizeArtifact", "application/protobuf", data);
return promise.then((data) => artifact_1.FinalizeArtifactResponse.fromBinary(data));
}
ListArtifacts(request) {
const data = artifact_1.ListArtifactsRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "ListArtifacts", "application/protobuf", data);
return promise.then((data) => artifact_1.ListArtifactsResponse.fromBinary(data));
}
GetSignedArtifactURL(request) {
const data = artifact_1.GetSignedArtifactURLRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "GetSignedArtifactURL", "application/protobuf", data);
return promise.then((data) => artifact_1.GetSignedArtifactURLResponse.fromBinary(data));
}
DeleteArtifact(request) {
const data = artifact_1.DeleteArtifactRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "DeleteArtifact", "application/protobuf", data);
return promise.then((data) => artifact_1.DeleteArtifactResponse.fromBinary(data));
}
}
exports.ArtifactServiceClientProtobuf = ArtifactServiceClientProtobuf;
//# sourceMappingURL=artifact.twirp-client.js.map
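Both client classes above are thin wrappers around that Rpc seam: the JSON variant converts requests with toJson and parses responses with fromJson, while the Protobuf variant uses toBinary/fromBinary over the same request call. Below is a usage sketch that reuses the hypothetical FetchRpc and Rpc shapes from the earlier sketch; the import paths are assumed, and the request is left at its generated defaults because the ListArtifactsRequest fields are not reproduced in this diff.

// Sketch only: import paths are assumed; ArtifactServiceClientJSON and
// ListArtifactsRequest are the exports shown in the generated files above.
import { ArtifactServiceClientJSON } from "./artifact.twirp-client";
import { ListArtifactsRequest } from "./artifact";

async function listArtifacts(rpc: Rpc): Promise<void> {
    const client = new ArtifactServiceClientJSON(rpc);
    // Each method is toJson -> rpc.request(..., "application/json", ...) -> fromJson,
    // exactly as in the generated ListArtifacts method above.
    const request = ListArtifactsRequest.create(); // defaults only; real callers populate the fields
    const response = await client.ListArtifacts(request);
    console.log(response);
}

The Protobuf client is wired identically, except that rpc.request receives "application/protobuf" and a Uint8Array body.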


@@ -1 +0,0 @@
{"version":3,"file":"artifact.twirp-client.js","sourceRoot":"","sources":["../../../../../src/generated/results/api/v1/artifact.twirp-client.ts"],"names":[],"mappings":";;;AAAA,yCAWoB;AA+BpB,MAAa,yBAAyB;IAEpC,YAAY,GAAQ;QAClB,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;QACf,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAC/B,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACjC,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAC9B,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACrC,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACjC,CAAC;IACD,cAAc,CACZ,OAA8B;QAE9B,MAAM,IAAI,GAAG,gCAAqB,CAAC,MAAM,CAAC,OAAO,EAAE;YACjD,iBAAiB,EAAE,IAAI;YACvB,iBAAiB,EAAE,KAAK;SACzB,CAAC,CAAC;QACH,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,gBAAgB,EAChB,kBAAkB,EAClB,IAAc,CACf,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,iCAAsB,CAAC,QAAQ,CAAC,IAAW,EAAE;YAC3C,mBAAmB,EAAE,IAAI;SAC1B,CAAC,CACH,CAAC;IACJ,CAAC;IAED,gBAAgB,CACd,OAAgC;QAEhC,MAAM,IAAI,GAAG,kCAAuB,CAAC,MAAM,CAAC,OAAO,EAAE;YACnD,iBAAiB,EAAE,IAAI;YACvB,iBAAiB,EAAE,KAAK;SACzB,CAAC,CAAC;QACH,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,kBAAkB,EAClB,kBAAkB,EAClB,IAAc,CACf,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,mCAAwB,CAAC,QAAQ,CAAC,IAAW,EAAE;YAC7C,mBAAmB,EAAE,IAAI;SAC1B,CAAC,CACH,CAAC;IACJ,CAAC;IAED,aAAa,CAAC,OAA6B;QACzC,MAAM,IAAI,GAAG,+BAAoB,CAAC,MAAM,CAAC,OAAO,EAAE;YAChD,iBAAiB,EAAE,IAAI;YACvB,iBAAiB,EAAE,KAAK;SACzB,CAAC,CAAC;QACH,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,eAAe,EACf,kBAAkB,EAClB,IAAc,CACf,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,gCAAqB,CAAC,QAAQ,CAAC,IAAW,EAAE,EAAE,mBAAmB,EAAE,IAAI,EAAE,CAAC,CAC3E,CAAC;IACJ,CAAC;IAED,oBAAoB,CAClB,OAAoC;QAEpC,MAAM,IAAI,GAAG,sCAA2B,CAAC,MAAM,CAAC,OAAO,EAAE;YACvD,iBAAiB,EAAE,IAAI;YACvB,iBAAiB,EAAE,KAAK;SACzB,CAAC,CAAC;QACH,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,sBAAsB,EACtB,kBAAkB,EAClB,IAAc,CACf,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,uCAA4B,CAAC,QAAQ,CAAC,IAAW,EAAE;YACjD,mBAAmB,EAAE,IAAI;SAC1B,CAAC,CACH,CAAC;IACJ,CAAC;IAED,cAAc,CACZ,OAA8B;QAE9B,MAAM,IAAI,GAAG,gCAAqB,CAAC,MAAM,CAAC,OAAO,EAAE;YACjD,iBAAiB,EAAE,IAAI;YACvB,iBAAiB,EAAE,KAAK;SACzB,CAAC,CAAC;QACH,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,gBAAgB,EAChB,kBAAkB,EAClB,IAAc,CACf,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,iCAAsB,CAAC,QAAQ,CAAC,IAAW,EAAE;YAC3C,mBAAmB,EAAE,IAAI;SAC1B,CAAC,CACH,CAAC;IACJ,CAAC;CACF;AAzGD,8DAyGC;AAED,MAAa,6BAA6B;IAExC,YAAY,GAAQ;QAClB,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;QACf,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAC/B,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACjC,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAC9B,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACrC,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACjC,CAAC;IACD,cAAc,CACZ,OAA8B;QAE9B,MAAM,IAAI,GAAG,gCAAqB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QACrD,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,gBAAgB,EAChB,sBAAsB,EACtB,IAAI,CACL,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,iCAAsB,CAAC,UAAU,CAAC,IAAkB,CAAC,CACtD,CAAC;IACJ,CAAC;IAED,gBAAgB,CACd,OAAgC;QAEhC,MAAM,IAAI,GAAG,kCAAuB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QACvD,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,kBAAkB,EAClB,sBAAsB,EACtB,IAAI,CACL,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,mCAAwB,CAAC,UAAU,CAAC,IAAkB,CAAC,CACxD,CAAC;IACJ,CAAC;IAED,aAAa,CAAC,OAA6B;QACzC,MAAM,IAAI,GAAG,+BAAoB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QACpD,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,eAAe,EACf,sBAAsB,EACtB,
IAAI,CACL,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,gCAAqB,CAAC,UAAU,CAAC,IAAkB,CAAC,CACrD,CAAC;IACJ,CAAC;IAED,oBAAoB,CAClB,OAAoC;QAEpC,MAAM,IAAI,GAAG,sCAA2B,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QAC3D,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,sBAAsB,EACtB,sBAAsB,EACtB,IAAI,CACL,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,uCAA4B,CAAC,UAAU,CAAC,IAAkB,CAAC,CAC5D,CAAC;IACJ,CAAC;IAED,cAAc,CACZ,OAA8B;QAE9B,MAAM,IAAI,GAAG,gCAAqB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QACrD,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,CAAC,OAAO,CAC9B,+CAA+C,EAC/C,gBAAgB,EAChB,sBAAsB,EACtB,IAAI,CACL,CAAC;QACF,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAC3B,iCAAsB,CAAC,UAAU,CAAC,IAAkB,CAAC,CACtD,CAAC;IACJ,CAAC;CACF;AAlFD,sEAkFC"}

Some files were not shown because too many files have changed in this diff.