Compare commits

..

2 Commits

Author         SHA1         Message                         Date
Henry Mercer   4d8b358273   Disable SIP disablement check   2025-10-28 12:27:34 +00:00
Henry Mercer   f336c09493   Add testing trigger             2025-10-28 12:26:18 +00:00
72 changed files with 74812 additions and 72018 deletions

View File

@@ -16,9 +16,9 @@ runs:
shell: bash
- name: Set up Python
uses: actions/setup-python@v6
uses: actions/setup-python@v5
with:
python-version: '3.12'
python-version: 3.12
- name: Install dependencies
run: |

View File

@@ -79,7 +79,7 @@ jobs:
output: ${{ runner.temp }}/results
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.os }}-zstd-bundle.sarif
path: ${{ runner.temp }}/results/javascript.sarif

View File

@@ -67,7 +67,7 @@ jobs:
output: ${{ runner.temp }}/results
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif

View File

@@ -78,7 +78,7 @@ jobs:
output: ${{ runner.temp }}/results
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif

View File

@@ -85,7 +85,7 @@ jobs:
with:
output: ${{ runner.temp }}/results
- name: Upload SARIF
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif

View File

@@ -64,7 +64,7 @@ jobs:
with:
output: ${{ runner.temp }}/results
- name: Upload SARIF
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif

View File

@@ -9,9 +9,6 @@ env:
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v*
pull_request:
types:
- opened
@@ -56,42 +53,8 @@ jobs:
fail-fast: false
matrix:
include:
- os: macos-latest
version: stable-v2.17.6
- os: ubuntu-latest
version: stable-v2.17.6
- os: macos-latest
version: stable-v2.18.4
- os: ubuntu-latest
version: stable-v2.18.4
- os: macos-latest
version: stable-v2.19.4
- os: ubuntu-latest
version: stable-v2.19.4
- os: macos-latest
version: stable-v2.20.7
- os: ubuntu-latest
version: stable-v2.20.7
- os: macos-latest
version: stable-v2.21.4
- os: ubuntu-latest
version: stable-v2.21.4
- os: macos-latest
version: stable-v2.22.4
- os: ubuntu-latest
version: stable-v2.22.4
- os: macos-latest
version: default
- os: ubuntu-latest
version: default
- os: macos-latest
version: linked
- os: ubuntu-latest
version: linked
- os: macos-latest
version: nightly-latest
- os: ubuntu-latest
version: nightly-latest
name: Multi-language repository
if: github.triggering_actor != 'dependabot[bot]'
permissions:
@@ -185,6 +148,3 @@ jobs:
echo "Did not create a database for Swift, or created it in the wrong location."
exit 1
fi
env:
CODEQL_ACTION_RESOLVE_SUPPORTED_LANGUAGES_USING_CLI: true
CODEQL_ACTION_TEST_MODE: true

View File

@@ -83,7 +83,7 @@ jobs:
post-processed-sarif-path: ${{ runner.temp }}/post-processed
- name: Upload security SARIF
if: contains(matrix.analysis-kinds, 'code-scanning')
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: |
quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
@@ -91,14 +91,14 @@ jobs:
retention-days: 7
- name: Upload quality SARIF
if: contains(matrix.analysis-kinds, 'code-quality')
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: |
quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
path: ${{ runner.temp }}/results/javascript.quality.sarif
retention-days: 7
- name: Upload post-processed SARIF
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: |
post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json

View File

@@ -56,7 +56,7 @@ jobs:
use-all-platform-bundle: 'false'
setup-kotlin: 'true'
- name: Set up Ruby
uses: ruby/setup-ruby@d5126b9b3579e429dd52e51e68624dda2e05be25 # v1.267.0
uses: ruby/setup-ruby@ab177d40ee5483edb974554986f56b33477e21d0 # v1.265.0
with:
ruby-version: 2.6
- name: Install Code Scanning integration

View File

@@ -15,7 +15,7 @@ defaults:
jobs:
check-expected-release-files:
runs-on: ubuntu-slim
runs-on: ubuntu-latest
permissions:
contents: read

View File

@@ -81,7 +81,7 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [ubuntu-22.04,ubuntu-24.04,windows-2022,windows-2025,macos-14,macos-15]
os: [ubuntu-22.04,ubuntu-24.04,windows-2022,windows-2025,macos-13,macos-14,macos-15]
tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
runs-on: ${{ matrix.os }}

View File

@@ -79,7 +79,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download all artifacts
uses: actions/download-artifact@v6
uses: actions/download-artifact@v5
- name: Check expected artifacts exist
run: |
LANGUAGES="cpp csharp go java javascript python"

View File

@@ -73,7 +73,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download all artifacts
uses: actions/download-artifact@v6
uses: actions/download-artifact@v5
- name: Check expected artifacts exist
run: |
VERSIONS="stable-v2.20.3 default linked nightly-latest"

View File

@@ -16,7 +16,7 @@ permissions:
jobs:
sizeup:
name: Label PR with size
runs-on: ubuntu-slim
runs-on: ubuntu-latest
steps:
- name: Run sizeup

View File

@@ -24,7 +24,7 @@ defaults:
jobs:
merge-back:
runs-on: ubuntu-slim
runs-on: ubuntu-latest
environment: Automation
if: github.repository == 'github/codeql-action'
env:
@@ -48,9 +48,6 @@ jobs:
with:
fetch-depth: 0 # ensure we have all tags and can push commits
- uses: actions/setup-node@v6
- uses: actions/setup-python@v6
with:
python-version: '3.12'
- name: Update git config
run: |

View File

@@ -29,7 +29,7 @@ defaults:
jobs:
prepare:
name: "Prepare release"
runs-on: ubuntu-slim
runs-on: ubuntu-latest
if: github.repository == 'github/codeql-action'
permissions:

View File

@@ -1,10 +1,8 @@
name: 'Publish Immutable Action Version'
on:
push:
tags:
# Match version tags, but not the major version tags.
- 'v[0-9]+.**'
release:
types: [published]
defaults:
run:
@@ -12,16 +10,30 @@ defaults:
jobs:
publish:
runs-on: ubuntu-slim
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
packages: write
steps:
- name: Checkout repository
- name: Check release name
id: check
env:
RELEASE_NAME: ${{ github.event.release.name }}
run: |
echo "Release name: ${{ github.event.release.name }}"
if [[ $RELEASE_NAME == v* ]]; then
echo "This is a CodeQL Action release. Create an Immutable Action"
echo "is-action-release=true" >> $GITHUB_OUTPUT
else
echo "This is a CodeQL Bundle release. Do not create an Immutable Action"
echo "is-action-release=false" >> $GITHUB_OUTPUT
fi
- name: Checking out
if: steps.check.outputs.is-action-release == 'true'
uses: actions/checkout@v5
- name: Publish immutable release
- name: Publish
if: steps.check.outputs.is-action-release == 'true'
id: publish
uses: actions/publish-immutable-action@v0.0.4

View File

@@ -1,18 +0,0 @@
import os
import re
# Get the PR number from the PR URL.
pr_number = os.environ['PR_URL'].split('/')[-1]
changelog_note = f"- Update default CodeQL bundle version to {os.environ['CLI_VERSION']}. [#{pr_number}]({os.environ['PR_URL']})"
# If the "[UNRELEASED]" section starts with "no user facing changes", remove that line.
with open('CHANGELOG.md', 'r') as f:
changelog = f.read()
changelog = changelog.replace('## [UNRELEASED]\n\nNo user facing changes.', '## [UNRELEASED]\n')
# Add the changelog note to the bottom of the "[UNRELEASED]" section.
changelog = re.sub(r'\n## (\d+\.\d+\.\d+)', f'{changelog_note}\n\n## \\1', changelog, count=1)
with open('CHANGELOG.md', 'w') as f:
f.write(changelog)

View File

@@ -29,7 +29,7 @@ fi
echo "Getting checks for $GITHUB_SHA"
# Ignore any checks with "https://", CodeQL, LGTM, Update, and ESLint checks.
CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs.[] | select(.conclusion != "skipped") | .name | select(contains("https://") or . == "CodeQL" or . == "Dependabot" or . == "check-expected-release-files" or contains("Update") or contains("ESLint") or contains("update") or contains("test-setup-python-scripts") or . == "Agent" or . == "Cleanup artifacts" or . == "Prepare" or . == "Upload results" | not)] | unique | sort')"
CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs.[] | select(.conclusion != "skipped") | .name | select(contains("https://") or . == "CodeQL" or . == "Dependabot" or . == "check-expected-release-files" or contains("Update") or contains("ESLint") or contains("update") or contains("test-setup-python-scripts") | not)] | unique | sort')"
echo "$CHECKS" | jq

View File

@@ -20,7 +20,7 @@ defaults:
jobs:
update-bundle:
if: github.event.release.prerelease && startsWith(github.event.release.tag_name, 'codeql-bundle-')
runs-on: ubuntu-slim
runs-on: ubuntu-latest
permissions:
contents: write # needed to push commits
pull-requests: write # needed to create pull requests
@@ -40,11 +40,6 @@ jobs:
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: '3.12'
- name: Set up Node.js
uses: actions/setup-node@v6
with:
@@ -83,8 +78,28 @@ jobs:
echo "PR_URL=$pr_url" | tee -a "$GITHUB_ENV"
- name: Create changelog note
shell: python
run: |
python .github/workflows/script/bundle_changelog.py
import os
import re
# Get the PR number from the PR URL.
pr_number = os.environ['PR_URL'].split('/')[-1]
changelog_note = f"- Update default CodeQL bundle version to {os.environ['CLI_VERSION']}. [#{pr_number}]({os.environ['PR_URL']})"
# If the "[UNRELEASED]" section starts with "no user facing changes", remove that line.
# Use perl to avoid having to escape the newline character.
with open('CHANGELOG.md', 'r') as f:
changelog = f.read()
changelog = changelog.replace('## [UNRELEASED]\n\nNo user facing changes.', '## [UNRELEASED]\n')
# Add the changelog note to the bottom of the "[UNRELEASED]" section.
changelog = re.sub(r'\n## (\d+\.\d+\.\d+)', f'{changelog_note}\n\n## \\1', changelog, count=1)
with open('CHANGELOG.md', 'w') as f:
f.write(changelog)
- name: Push changelog note
run: |

View File

@@ -26,7 +26,7 @@ jobs:
update:
timeout-minutes: 45
runs-on: ubuntu-slim
runs-on: ubuntu-latest
if: github.event_name == 'workflow_dispatch'
needs: [prepare]
env:
@@ -77,7 +77,7 @@ jobs:
backport:
timeout-minutes: 45
runs-on: ubuntu-slim
runs-on: ubuntu-latest
environment: Automation
needs: [prepare]
if: ${{ (github.event_name == 'push') && needs.prepare.outputs.backport_target_branches != '[]' }}

View File

@@ -9,7 +9,7 @@ jobs:
update-supported-enterprise-server-versions:
name: Update Supported Enterprise Server Versions
timeout-minutes: 45
runs-on: ubuntu-slim
runs-on: ubuntu-latest
if: github.repository == 'github/codeql-action'
permissions:
contents: write # needed to push commits

View File

@@ -6,18 +6,6 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th
No user facing changes.
## v4.31.3 - 05 Nov 2025
This release rolls back 4.31.2 due to issues with that release. It is identical to 0.0.0.
## 4.31.2 - 30 Oct 2025
No user facing changes.
## 4.31.1 - 30 Oct 2025
- The `add-snippets` input has been removed from the `analyze` action. This input has been deprecated since CodeQL Action 3.26.4 in August 2024 when this removal was announced.
## 4.31.0 - 24 Oct 2025
- Bump minimum CodeQL bundle version to 2.17.6. [#3223](https://github.com/github/codeql-action/pull/3223)
@@ -1077,4 +1065,3 @@ No user facing changes.
- Add this changelog file. [#507](https://github.com/github/codeql-action/pull/507)
- Improve grouping of analysis logs. Add a new log group containing a summary of metrics and diagnostics, if they were produced by CodeQL builtin queries. [#515](https://github.com/github/codeql-action/pull/515)
- Add metrics and diagnostics summaries from custom query suites to the analysis summary log group. [#532](https://github.com/github/codeql-action/pull/532)

View File

@@ -32,10 +32,14 @@ inputs:
and 13GB for macOS).
required: false
add-snippets:
description: Does not have any effect.
description: Specify whether or not to add code snippets to the output sarif file.
required: false
default: "false"
deprecationMessage: >-
The input "add-snippets" has been removed and no longer has any effect.
The input "add-snippets" is deprecated and will be removed on the first release in August 2025.
When this input is set to true it is expected to add code snippets with an alert to the SARIF file.
However, since Code Scanning ignores code snippets provided as part of a SARIF file this is currently
a no operation. No alternative is available.
skip-queries:
description: If this option is set, the CodeQL database will be built but no queries will be run on it. Thus, no results will be produced.
required: false
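
The deprecation text above describes an input that no longer changes the SARIF output; on the newer side of this compare the analyze entry point simply warns when the retired input is still supplied. A minimal, self-contained sketch of that pattern (the getOptionalInput helper here is assumed for illustration, not copied from the repository):

```typescript
import * as core from "@actions/core";

// Sketch only: treat an empty action input as "not provided".
function getOptionalInput(name: string): string | undefined {
  const value = core.getInput(name);
  return value.length > 0 ? value : undefined;
}

// Warn when a removed input is still set, instead of forwarding it to the CLI.
if (getOptionalInput("add-snippets") !== undefined) {
  core.warning(
    "The `add-snippets` input has been removed and no longer has any effect.",
  );
}
```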

13758   lib/analyze-action-post.js (generated): file diff suppressed because one or more lines are too long
13165   lib/analyze-action.js (generated): file diff suppressed because it is too large
5490    lib/autobuild-action.js (generated): file diff suppressed because it is too large
22279   lib/init-action-post.js (generated): file diff suppressed because one or more lines are too long
13254   lib/init-action.js (generated): file diff suppressed because it is too large
File diff suppressed because it is too large
12702   lib/setup-codeql-action.js (generated): file diff suppressed because it is too large
File diff suppressed because one or more lines are too long
5220    lib/start-proxy-action.js (generated): file diff suppressed because it is too large
12710   lib/upload-lib.js (generated): file diff suppressed because it is too large
File diff suppressed because one or more lines are too long
12784   lib/upload-sarif-action.js (generated): file diff suppressed because it is too large
1305    package-lock.json (generated): file diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{
"name": "codeql",
"version": "4.31.4",
"version": "4.31.1",
"private": true,
"description": "CodeQL action",
"scripts": {
@@ -24,20 +24,22 @@
},
"license": "MIT",
"dependencies": {
"@actions/artifact": "^4.0.0",
"@actions/artifact": "^2.3.1",
"@actions/artifact-legacy": "npm:@actions/artifact@^1.1.2",
"@actions/cache": "^4.1.0",
"@actions/core": "^1.11.1",
"@actions/exec": "^1.1.1",
"@actions/github": "^6.0.0",
"@actions/glob": "^0.5.0",
"@actions/http-client": "^3.0.0",
"@actions/io": "^2.0.0",
"@actions/http-client": "^2.2.3",
"@actions/io": "^1.1.3",
"@actions/tool-cache": "^2.0.2",
"@octokit/plugin-retry": "^6.0.0",
"@octokit/request-error": "^7.0.2",
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
"archiver": "^7.0.1",
"console-log-level": "^1.4.1",
"del": "^8.0.0",
"fast-deep-equal": "^3.1.3",
"follow-redirects": "^1.15.11",
"get-folder-size": "^5.0.0",
@@ -45,28 +47,29 @@
"jsonschema": "1.4.1",
"long": "^5.3.2",
"node-forge": "^1.3.1",
"octokit": "^5.0.5",
"octokit": "^5.0.4",
"semver": "^7.7.3",
"uuid": "^13.0.0"
},
"devDependencies": {
"@ava/typescript": "6.0.0",
"@eslint/compat": "^1.4.1",
"@eslint/compat": "^1.4.0",
"@eslint/eslintrc": "^3.3.1",
"@eslint/js": "^9.39.0",
"@eslint/js": "^9.38.0",
"@microsoft/eslint-formatter-sarif": "^3.1.0",
"@octokit/types": "^16.0.0",
"@types/archiver": "^7.0.0",
"@octokit/types": "^15.0.0",
"@types/archiver": "^6.0.3",
"@types/console-log-level": "^1.4.5",
"@types/follow-redirects": "^1.14.4",
"@types/js-yaml": "^4.0.9",
"@types/node": "20.19.9",
"@types/node-forge": "^1.3.14",
"@types/semver": "^7.7.1",
"@types/sinon": "^17.0.4",
"@typescript-eslint/eslint-plugin": "^8.46.3",
"@typescript-eslint/eslint-plugin": "^8.46.1",
"@typescript-eslint/parser": "^8.41.0",
"ava": "^6.4.1",
"esbuild": "^0.25.12",
"esbuild": "^0.25.11",
"eslint": "^8.57.1",
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-filenames": "^1.3.2",

View File

@@ -27,7 +27,7 @@ steps:
output: ${{ runner.temp }}/results
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.os }}-zstd-bundle.sarif
path: ${{ runner.temp }}/results/javascript.sarif

View File

@@ -12,7 +12,7 @@ steps:
output: "${{ runner.temp }}/results"
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: "${{ runner.temp }}/results/javascript.sarif"

View File

@@ -25,7 +25,7 @@ steps:
output: "${{ runner.temp }}/results"
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: "${{ runner.temp }}/results/javascript.sarif"

View File

@@ -17,7 +17,7 @@ steps:
with:
output: "${{ runner.temp }}/results"
- name: Upload SARIF
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: "${{ runner.temp }}/results/javascript.sarif"

View File

@@ -11,7 +11,7 @@ steps:
with:
output: "${{ runner.temp }}/results"
- name: Upload SARIF
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: "${{ runner.temp }}/results/javascript.sarif"

View File

@@ -39,7 +39,7 @@ steps:
post-processed-sarif-path: "${{ runner.temp }}/post-processed"
- name: Upload security SARIF
if: contains(matrix.analysis-kinds, 'code-scanning')
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: |
quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
@@ -47,14 +47,14 @@ steps:
retention-days: 7
- name: Upload quality SARIF
if: contains(matrix.analysis-kinds, 'code-quality')
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: |
quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
path: "${{ runner.temp }}/results/javascript.quality.sarif"
retention-days: 7
- name: Upload post-processed SARIF
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: |
post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json

View File

@@ -4,7 +4,7 @@ description: "Tests using RuboCop to analyze a multi-language repository and the
versions: ["default"]
steps:
- name: Set up Ruby
uses: ruby/setup-ruby@d5126b9b3579e429dd52e51e68624dda2e05be25 # v1.267.0
uses: ruby/setup-ruby@ab177d40ee5483edb974554986f56b33477e21d0 # v1.265.0
with:
ruby-version: 2.6
- name: Install Code Scanning integration

View File

@@ -9,15 +9,9 @@ if [ "$GITHUB_ACTIONS" = "true" ]; then
fi
# Check if npm install is likely needed before proceeding
if [ ! -d node_modules ]; then
echo "Running 'npm install' because 'node_modules' directory is missing."
npm install
elif [ package.json -nt package-lock.json ]; then
echo "Running 'npm install' because 'package-lock.json' appears to be outdated."
npm install
elif [ package-lock.json -nt node_modules/.package-lock.json ]; then
echo "Running 'npm install' because 'node_modules/.package-lock.json' appears to be outdated."
if [ ! -d node_modules ] || [ package-lock.json -nt node_modules/.package-lock.json ]; then
echo "Running 'npm install' because 'node_modules/.package-lock.json' appears to be outdated..."
npm install
else
echo "Skipping 'npm install' because everything appears to be up-to-date."
echo "Skipping 'npm install' because 'node_modules/.package-lock.json' appears to be up-to-date."
fi

View File

@@ -78,7 +78,7 @@ test("analyze action with RAM & threads from environment variables", async (t) =
t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=4992");
t.assert(runQueriesStub.calledOnce);
t.deepEqual(runQueriesStub.firstCall.args[2], "--threads=-1");
t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=4992");
});
});

View File

@@ -76,7 +76,7 @@ test("analyze action with RAM & threads from action inputs", async (t) => {
t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=3012");
t.assert(runQueriesStub.calledOnce);
t.deepEqual(runQueriesStub.firstCall.args[2], "--threads=-1");
t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=3012");
});
});

View File

@@ -324,16 +324,10 @@ async function run() {
);
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
// Warn if the removed `add-snippets` input is used.
if (actionsUtil.getOptionalInput("add-snippets") !== undefined) {
logger.warning(
"The `add-snippets` input has been removed and no longer has any effect.",
);
}
runStats = await runQueries(
outputDir,
memory,
util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")),
threads,
diffRangePackDir,
actionsUtil.getOptionalInput("category"),
@@ -438,11 +432,14 @@ async function run() {
// Store dependency cache(s) if dependency caching is enabled.
if (shouldStoreCache(config.dependencyCachingEnabled)) {
dependencyCacheResults = await uploadDependencyCaches(
const minimizeJavaJars = await features.getValue(
Feature.JavaMinimizeDependencyJars,
codeql,
features,
);
dependencyCacheResults = await uploadDependencyCaches(
config,
logger,
minimizeJavaJars,
);
}

View File

@@ -37,6 +37,7 @@ test("status report fields", async (t) => {
setupActionsVars(tmpDir, tmpDir);
const memoryFlag = "";
const addSnippetsFlag = "";
const threadsFlag = "";
sinon.stub(uploadLib, "validateSarifFileSchema");
@@ -102,6 +103,7 @@ test("status report fields", async (t) => {
const statusReport = await runQueries(
tmpDir,
memoryFlag,
addSnippetsFlag,
threadsFlag,
undefined,
undefined,

View File

@@ -3,6 +3,7 @@ import * as path from "path";
import { performance } from "perf_hooks";
import * as io from "@actions/io";
import * as del from "del";
import * as yaml from "js-yaml";
import { getTemporaryDirectory, PullRequestBranches } from "./actions-util";
@@ -436,6 +437,7 @@ export function addSarifExtension(
export async function runQueries(
sarifFolder: string,
memoryFlag: string,
addSnippetsFlag: string,
threadsFlag: string,
diffRangePackDir: string | undefined,
automationDetailsId: string | undefined,
@@ -625,6 +627,7 @@ export async function runQueries(
databasePath,
queries,
sarifFile,
addSnippetsFlag,
threadsFlag,
enableDebugLogging ? "-vv" : "-v",
sarifRunPropertyFlag,
@@ -668,7 +671,7 @@ export async function runFinalize(
logger: Logger,
): Promise<DatabaseCreationTimings> {
try {
await fs.promises.rm(outputDir, { force: true, recursive: true });
await del.deleteAsync(outputDir, { force: true });
} catch (error: any) {
if (error?.code !== "ENOENT") {
throw error;
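
This hunk swaps between fs.promises.rm and del.deleteAsync for clearing the results directory; both remove a path recursively, differing mainly in how "force" is interpreted. A short sketch of the two calls side by side (the directory path is assumed for illustration):

```typescript
import * as fs from "fs";
import * as del from "del";

async function clearOutputDir(): Promise<void> {
  const outputDir = "/tmp/codeql-results"; // assumed path, for illustration only

  // Newer side of the compare: built-in fs API; force ignores a missing path,
  // recursive removes the directory tree.
  await fs.promises.rm(outputDir, { force: true, recursive: true });

  // Older side: the del package; force allows deleting outside the current
  // working directory.
  await del.deleteAsync(outputDir, { force: true });
}
```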

View File

@@ -169,32 +169,4 @@ test("wrapApiConfigurationError correctly wraps specific configuration errors",
res,
new util.ConfigurationError("Resource not accessible by integration"),
);
// Enablement errors.
const enablementErrorMessages = [
"Code Security must be enabled for this repository to use code scanning",
"Advanced Security must be enabled for this repository to use code scanning",
"Code Scanning is not enabled for this repository. Please enable code scanning in the repository settings.",
];
const transforms = [
(msg: string) => msg,
(msg: string) => msg.toLowerCase(),
(msg: string) => msg.toLocaleUpperCase(),
];
for (const enablementErrorMessage of enablementErrorMessages) {
for (const transform of transforms) {
const enablementError = new util.HTTPError(
transform(enablementErrorMessage),
403,
);
res = api.wrapApiConfigurationError(enablementError);
t.deepEqual(
res,
new util.ConfigurationError(
api.getFeatureEnablementError(enablementError.message),
),
);
}
}
});

View File

@@ -1,6 +1,7 @@
import * as core from "@actions/core";
import * as githubUtils from "@actions/github/lib/utils";
import * as retry from "@octokit/plugin-retry";
import consoleLogLevel from "console-log-level";
import { getActionVersion, getRequiredInput } from "./actions-util";
import { Logger } from "./logging";
@@ -49,12 +50,7 @@ function createApiClientWithDetails(
githubUtils.getOctokitOptions(auth, {
baseUrl: apiDetails.apiURL,
userAgent: `CodeQL-Action/${getActionVersion()}`,
log: {
debug: core.debug,
info: core.info,
warn: core.warning,
error: core.error,
},
log: consoleLogLevel({ level: "debug" }),
}),
);
}
@@ -283,20 +279,6 @@ export async function getRepositoryProperties(repositoryNwo: RepositoryNwo) {
});
}
function isEnablementError(msg: string) {
return [
/Code Security must be enabled/i,
/Advanced Security must be enabled/i,
/Code Scanning is not enabled/i,
].some((pattern) => pattern.test(msg));
}
// TODO: Move to `error-messages.ts` after refactoring import order to avoid cycle
// since `error-messages.ts` currently depends on this file.
export function getFeatureEnablementError(message: string): string {
return `Please verify that the necessary features are enabled: ${message}`;
}
export function wrapApiConfigurationError(e: unknown) {
const httpError = asHTTPError(e);
if (httpError !== undefined) {
@@ -318,11 +300,6 @@ export function wrapApiConfigurationError(e: unknown) {
"Please check that your token is valid and has the required permissions: contents: read, security-events: write",
);
}
if (httpError.status === 403 && isEnablementError(httpError.message)) {
return new ConfigurationError(
getFeatureEnablementError(httpError.message),
);
}
if (httpError.status === 429) {
return new ConfigurationError("API rate limit exceeded");
}

View File

@@ -1,5 +1,3 @@
import * as crypto from "crypto";
import * as core from "@actions/core";
import { getOptionalInput, isDefaultSetup } from "./actions-util";
@@ -73,33 +71,6 @@ export function getCachingKind(input: string | undefined): CachingKind {
}
}
// The length to which `createCacheKeyHash` truncates hash strings.
export const cacheKeyHashLength = 16;
/**
* Creates a SHA-256 hash of the cache key components to ensure uniqueness
* while keeping the cache key length manageable.
*
* @param components Object containing all components that should influence cache key uniqueness
* @returns A short SHA-256 hash (first 16 characters) of the components
*/
export function createCacheKeyHash(components: Record<string, any>): string {
// From https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify
//
// "Properties are visited using the same algorithm as Object.keys(), which
// has a well-defined order and is stable across implementations. For example,
// JSON.stringify on the same object will always produce the same string, and
// JSON.parse(JSON.stringify(obj)) would produce an object with the same key
// ordering as the original (assuming the object is completely
// JSON-serializable)."
const componentsJson = JSON.stringify(components);
return crypto
.createHash("sha256")
.update(componentsJson)
.digest("hex")
.substring(0, cacheKeyHashLength);
}
/** Determines whether dependency caching is enabled. */
export function getDependencyCachingEnabled(): CachingKind {
// If the workflow specified something always respect that
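The helper removed in this hunk turns an object of cache-key components into a short, deterministic prefix by hashing its JSON form and truncating to 16 hex characters. A standalone usage sketch of the same idea (function and argument names chosen here for illustration):

```typescript
import * as crypto from "crypto";

// Sketch: hash cache-key components into a short, stable prefix. For the same
// object, JSON.stringify produces the same string, so equal inputs hash equally.
function shortComponentHash(components: Record<string, unknown>): string {
  return crypto
    .createHash("sha256")
    .update(JSON.stringify(components))
    .digest("hex")
    .substring(0, 16); // 16 characters, matching the truncation length above
}

// Example: repeated calls with identical components yield the same prefix.
console.log(shortComponentHash({ features: ["csharp_new_cache_key"] }));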

View File

@@ -5,6 +5,7 @@ import * as toolrunner from "@actions/exec/lib/toolrunner";
import * as io from "@actions/io";
import * as toolcache from "@actions/tool-cache";
import test, { ExecutionContext } from "ava";
import * as del from "del";
import * as yaml from "js-yaml";
import nock from "nock";
import * as sinon from "sinon";
@@ -556,7 +557,7 @@ const injectedConfigMacro = test.macro({
const augmentedConfig = yaml.load(fs.readFileSync(configFile, "utf8"));
t.deepEqual(augmentedConfig, expectedConfig);
await fs.promises.rm(configFile, { force: true });
await del.deleteAsync(configFile, { force: true });
});
},
@@ -1045,7 +1046,7 @@ test("Avoids duplicating --overwrite flag if specified in CODEQL_ACTION_EXTRA_OP
);
t.truthy(configArg, "Should have injected a codescanning config");
const configFile = configArg!.split("=")[1];
await fs.promises.rm(configFile, { force: true });
await del.deleteAsync(configFile, { force: true });
});
export function stubToolRunnerConstructor(

View File

@@ -167,6 +167,7 @@ export interface CodeQL {
databasePath: string,
querySuitePaths: string[] | undefined,
sarifFile: string,
addSnippetsFlag: string,
threadsFlag: string,
verbosityFlag: string | undefined,
sarifRunPropertyFlag: string | undefined,
@@ -816,6 +817,7 @@ export async function getCodeQLForCmd(
databasePath: string,
querySuitePaths: string[] | undefined,
sarifFile: string,
addSnippetsFlag: string,
threadsFlag: string,
verbosityFlag: string,
sarifRunPropertyFlag: string | undefined,
@@ -834,6 +836,7 @@ export async function getCodeQLForCmd(
"--format=sarif-latest",
verbosityFlag,
`--output=${sarifFile}`,
addSnippetsFlag,
"--print-diagnostics-summary",
"--print-metrics-summary",
"--sarif-add-baseline-file-info",

View File

@@ -873,62 +873,71 @@ const mockRepositoryNwo = parseRepositoryNwo("owner/repo");
expectedLanguages: ["javascript"],
},
].forEach((args) => {
test(`getLanguages: ${args.name}`, async (t) => {
const mockRequest = mockLanguagesInRepo(args.languagesInRepository);
const stubExtractorEntry = {
extractor_root: "",
};
const codeQL = createStubCodeQL({
betterResolveLanguages: (options) =>
Promise.resolve({
aliases: {
"c#": KnownLanguage.csharp,
c: KnownLanguage.cpp,
kotlin: KnownLanguage.java,
typescript: KnownLanguage.javascript,
},
extractors: {
cpp: [stubExtractorEntry],
csharp: [stubExtractorEntry],
java: [stubExtractorEntry],
javascript: [stubExtractorEntry],
python: [stubExtractorEntry],
...(options?.filterToLanguagesWithQueries
? {}
: {
html: [stubExtractorEntry],
}),
},
}),
for (const resolveSupportedLanguagesUsingCli of [true, false]) {
test(`getLanguages${resolveSupportedLanguagesUsingCli ? " (supported languages via CLI)" : ""}: ${args.name}`, async (t) => {
const features = createFeatures(
resolveSupportedLanguagesUsingCli
? [Feature.ResolveSupportedLanguagesUsingCli]
: [],
);
const mockRequest = mockLanguagesInRepo(args.languagesInRepository);
const stubExtractorEntry = {
extractor_root: "",
};
const codeQL = createStubCodeQL({
betterResolveLanguages: (options) =>
Promise.resolve({
aliases: {
"c#": KnownLanguage.csharp,
c: KnownLanguage.cpp,
kotlin: KnownLanguage.java,
typescript: KnownLanguage.javascript,
},
extractors: {
cpp: [stubExtractorEntry],
csharp: [stubExtractorEntry],
java: [stubExtractorEntry],
javascript: [stubExtractorEntry],
python: [stubExtractorEntry],
...(options?.filterToLanguagesWithQueries
? {}
: {
html: [stubExtractorEntry],
}),
},
}),
});
if (args.expectedLanguages) {
// happy path
const actualLanguages = await configUtils.getLanguages(
codeQL,
args.languagesInput,
mockRepositoryNwo,
".",
features,
mockLogger,
);
t.deepEqual(actualLanguages.sort(), args.expectedLanguages.sort());
} else {
// there is an error
await t.throwsAsync(
async () =>
await configUtils.getLanguages(
codeQL,
args.languagesInput,
mockRepositoryNwo,
".",
features,
mockLogger,
),
{ message: args.expectedError },
);
}
t.deepEqual(mockRequest.called, args.expectedApiCall);
});
if (args.expectedLanguages) {
// happy path
const actualLanguages = await configUtils.getLanguages(
codeQL,
args.languagesInput,
mockRepositoryNwo,
".",
mockLogger,
);
t.deepEqual(actualLanguages.sort(), args.expectedLanguages.sort());
} else {
// there is an error
await t.throwsAsync(
async () =>
await configUtils.getLanguages(
codeQL,
args.languagesInput,
mockRepositoryNwo,
".",
mockLogger,
),
{ message: args.expectedError },
);
}
t.deepEqual(mockRequest.called, args.expectedApiCall);
});
}
});
for (const { displayName, language, feature } of [

View File

@@ -34,7 +34,6 @@ import {
OverlayDatabaseMode,
} from "./overlay-database-utils";
import { RepositoryNwo } from "./repository";
import { ToolsFeature } from "./tools-features";
import { downloadTrapCaches } from "./trap-caching";
import {
GitHubVersion,
@@ -178,10 +177,12 @@ export interface Config {
export async function getSupportedLanguageMap(
codeql: CodeQL,
features: FeatureEnablement,
logger: Logger,
): Promise<Record<string, string>> {
const resolveSupportedLanguagesUsingCli = await codeql.supportsFeature(
ToolsFeature.BuiltinExtractorsSpecifyDefaultQueries,
const resolveSupportedLanguagesUsingCli = await features.getValue(
Feature.ResolveSupportedLanguagesUsingCli,
codeql,
);
const resolveResult = await codeql.betterResolveLanguages({
filterToLanguagesWithQueries: resolveSupportedLanguagesUsingCli,
@@ -282,6 +283,7 @@ export async function getLanguages(
languagesInput: string | undefined,
repository: RepositoryNwo,
sourceRoot: string,
features: FeatureEnablement,
logger: Logger,
): Promise<Language[]> {
// Obtain languages without filtering them.
@@ -292,7 +294,7 @@ export async function getLanguages(
logger,
);
const languageMap = await getSupportedLanguageMap(codeql, logger);
const languageMap = await getSupportedLanguageMap(codeql, features, logger);
const languagesSet = new Set<Language>();
const unknownLanguages: string[] = [];
@@ -429,6 +431,7 @@ export async function initActionState(
languagesInput,
repository,
sourceRoot,
features,
logger,
);

View File

@@ -5,6 +5,7 @@ import * as artifact from "@actions/artifact";
import * as artifactLegacy from "@actions/artifact-legacy";
import * as core from "@actions/core";
import archiver from "archiver";
import * as del from "del";
import { getOptionalInput, getTemporaryDirectory } from "./actions-util";
import { dbIsFinalized } from "./analyze";
@@ -344,7 +345,7 @@ async function createPartialDatabaseBundle(
);
// See `bundleDb` for explanation behind deleting existing db bundle.
if (fs.existsSync(databaseBundlePath)) {
await fs.promises.rm(databaseBundlePath, { force: true });
await del.deleteAsync(databaseBundlePath, { force: true });
}
const output = fs.createWriteStream(databaseBundlePath);
const zip = archiver("zip");

View File

@@ -1,157 +0,0 @@
import * as fs from "fs";
import path from "path";
import test from "ava";
// import * as sinon from "sinon";
import { cacheKeyHashLength } from "./caching-utils";
import { createStubCodeQL } from "./codeql";
import {
CacheConfig,
checkHashPatterns,
getFeaturePrefix,
makePatternCheck,
} from "./dependency-caching";
import { Feature } from "./feature-flags";
import { KnownLanguage } from "./languages";
import {
setupTests,
createFeatures,
getRecordingLogger,
checkExpectedLogMessages,
LoggedMessage,
} from "./testing-utils";
import { withTmpDir } from "./util";
setupTests(test);
function makeAbsolutePatterns(tmpDir: string, patterns: string[]): string[] {
return patterns.map((pattern) => path.join(tmpDir, pattern));
}
test("makePatternCheck - returns undefined if no patterns match", async (t) => {
await withTmpDir(async (tmpDir) => {
fs.writeFileSync(path.join(tmpDir, "test.java"), "");
const result = await makePatternCheck(
makeAbsolutePatterns(tmpDir, ["**/*.cs"]),
);
t.is(result, undefined);
});
});
test("makePatternCheck - returns all patterns if any pattern matches", async (t) => {
await withTmpDir(async (tmpDir) => {
fs.writeFileSync(path.join(tmpDir, "test.java"), "");
const patterns = makeAbsolutePatterns(tmpDir, ["**/*.cs", "**/*.java"]);
const result = await makePatternCheck(patterns);
t.deepEqual(result, patterns);
});
});
test("checkHashPatterns - logs when no patterns match", async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([]);
const messages: LoggedMessage[] = [];
const config: CacheConfig = {
getDependencyPaths: () => [],
getHashPatterns: async () => undefined,
};
const result = await checkHashPatterns(
codeql,
features,
KnownLanguage.csharp,
config,
getRecordingLogger(messages),
);
t.is(result, undefined);
checkExpectedLogMessages(t, messages, [
"Skipping download of dependency cache",
]);
});
test("checkHashPatterns - returns patterns when patterns match", async (t) => {
await withTmpDir(async (tmpDir) => {
const codeql = createStubCodeQL({});
const features = createFeatures([]);
const messages: LoggedMessage[] = [];
const patterns = makeAbsolutePatterns(tmpDir, ["**/*.cs", "**/*.java"]);
fs.writeFileSync(path.join(tmpDir, "test.java"), "");
const config: CacheConfig = {
getDependencyPaths: () => [],
getHashPatterns: async () => makePatternCheck(patterns),
};
const result = await checkHashPatterns(
codeql,
features,
KnownLanguage.csharp,
config,
getRecordingLogger(messages),
);
t.deepEqual(result, patterns);
t.deepEqual(messages, []);
});
});
test("getFeaturePrefix - returns empty string if no features are enabled", async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([]);
for (const knownLanguage of Object.values(KnownLanguage)) {
const result = await getFeaturePrefix(codeql, features, knownLanguage);
t.deepEqual(result, "", `Expected no feature prefix for ${knownLanguage}`);
}
});
test("getFeaturePrefix - Java - returns 'minify-' if JavaMinimizeDependencyJars is enabled", async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([Feature.JavaMinimizeDependencyJars]);
const result = await getFeaturePrefix(codeql, features, KnownLanguage.java);
t.deepEqual(result, "minify-");
});
test("getFeaturePrefix - non-Java - returns '' if JavaMinimizeDependencyJars is enabled", async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([Feature.JavaMinimizeDependencyJars]);
for (const knownLanguage of Object.values(KnownLanguage)) {
// Skip Java since we expect a result for it, which is tested in the previous test.
if (knownLanguage === KnownLanguage.java) {
continue;
}
const result = await getFeaturePrefix(codeql, features, knownLanguage);
t.deepEqual(result, "", `Expected no feature prefix for ${knownLanguage}`);
}
});
test("getFeaturePrefix - C# - returns prefix if CsharpNewCacheKey is enabled", async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([Feature.CsharpNewCacheKey]);
const result = await getFeaturePrefix(codeql, features, KnownLanguage.csharp);
t.notDeepEqual(result, "");
t.assert(result.endsWith("-"));
// Check the length of the prefix, which should correspond to `cacheKeyHashLength` + 1 for the trailing `-`.
t.is(result.length, cacheKeyHashLength + 1);
});
test("getFeaturePrefix - non-C# - returns '' if CsharpNewCacheKey is enabled", async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([Feature.CsharpNewCacheKey]);
for (const knownLanguage of Object.values(KnownLanguage)) {
// Skip C# since we expect a result for it, which is tested in the previous test.
if (knownLanguage === KnownLanguage.csharp) {
continue;
}
const result = await getFeaturePrefix(codeql, features, knownLanguage);
t.deepEqual(result, "", `Expected no feature prefix for ${knownLanguage}`);
}
});

View File

@@ -6,11 +6,9 @@ import * as glob from "@actions/glob";
import { getTemporaryDirectory } from "./actions-util";
import { listActionsCaches } from "./api-client";
import { createCacheKeyHash, getTotalCacheSize } from "./caching-utils";
import { CodeQL } from "./codeql";
import { getTotalCacheSize } from "./caching-utils";
import { Config } from "./config-utils";
import { EnvVar } from "./environment";
import { Feature, FeatureEnablement, Features } from "./feature-flags";
import { KnownLanguage, Language } from "./languages";
import { Logger } from "./logging";
import { getErrorMessage, getRequiredEnvParam } from "./util";
@@ -18,21 +16,15 @@ import { getErrorMessage, getRequiredEnvParam } from "./util";
/**
* Caching configuration for a particular language.
*/
export interface CacheConfig {
/** Gets the paths of directories on the runner that should be included in the cache. */
getDependencyPaths: () => string[];
interface CacheConfig {
/** The paths of directories on the runner that should be included in the cache. */
paths: string[];
/**
* Gets an array of glob patterns for the paths of files whose contents affect which dependencies are used
* by a project. This function also checks whether there are any matching files and returns
* `undefined` if no files match.
*
* The glob patterns are intended to be used for cache keys, where we find all files which match these
* patterns, calculate a hash for their contents, and use that hash as part of the cache key.
* Patterns for the paths of files whose contents affect which dependencies are used
* by a project. We find all files which match these patterns, calculate a hash for
* their contents, and use that hash as part of the cache key.
*/
getHashPatterns: (
codeql: CodeQL,
features: FeatureEnablement,
) => Promise<string[] | undefined>;
hash: string[];
}
const CODEQL_DEPENDENCY_CACHE_PREFIX = "codeql-dependencies";
@@ -47,100 +39,21 @@ export function getJavaTempDependencyDir(): string {
return join(getTemporaryDirectory(), "codeql_java", "repository");
}
/**
* Returns an array of paths of directories on the runner that should be included in a dependency cache
* for a Java analysis. It is important that this is a function, because we call `getTemporaryDirectory`
* which would otherwise fail in tests if we haven't had a chance to initialise `RUNNER_TEMP`.
*
* @returns The paths of directories on the runner that should be included in a dependency cache
* for a Java analysis.
*/
export function getJavaDependencyDirs(): string[] {
return [
// Maven
join(os.homedir(), ".m2", "repository"),
// Gradle
join(os.homedir(), ".gradle", "caches"),
// CodeQL Java build-mode: none
getJavaTempDependencyDir(),
];
}
/**
* Checks that there are files which match `patterns`. If there are matching files for any of the patterns,
* this function returns all `patterns`. Otherwise, `undefined` is returned.
*
* @param patterns The glob patterns to find matching files for.
* @returns The array of glob patterns if there are matching files, or `undefined` otherwise.
*/
export async function makePatternCheck(
patterns: string[],
): Promise<string[] | undefined> {
const globber = await makeGlobber(patterns);
if ((await globber.glob()).length === 0) {
return undefined;
}
return patterns;
}
/**
* Returns the list of glob patterns that should be used to calculate the cache key hash
* for a C# dependency cache.
*
* @param codeql The CodeQL instance to use.
* @param features Information about which FFs are enabled.
* @returns A list of glob patterns to use for hashing.
*/
async function getCsharpHashPatterns(
codeql: CodeQL,
features: FeatureEnablement,
): Promise<string[] | undefined> {
// These files contain accurate information about dependencies, including the exact versions
// that the relevant package manager has determined for the project. Using these gives us
// stable hashes unless the dependencies change.
const basePatterns = [
// NuGet
"**/packages.lock.json",
// Paket
"**/paket.lock",
];
const globber = await makeGlobber(basePatterns);
if ((await globber.glob()).length > 0) {
return basePatterns;
}
if (await features.getValue(Feature.CsharpNewCacheKey, codeql)) {
// These are less accurate for use in cache key calculations, because they:
//
// - Don't contain the exact versions used. They may only contain version ranges or none at all.
// - They contain information unrelated to dependencies, which we don't care about.
//
// As a result, the hash we compute from these files may change, even if
// the dependencies haven't changed.
return makePatternCheck([
"**/*.csproj",
"**/packages.config",
"**/nuget.config",
]);
}
// If we get to this point, the `basePatterns` didn't find any files,
// and `Feature.CsharpNewCacheKey` is either not enabled or we didn't
// find any files using those patterns either.
return undefined;
}
/**
* Default caching configurations per language.
*/
const defaultCacheConfigs: { [language: string]: CacheConfig } = {
java: {
getDependencyPaths: getJavaDependencyDirs,
getHashPatterns: async () =>
makePatternCheck([
function getDefaultCacheConfig(): { [language: string]: CacheConfig } {
return {
java: {
paths: [
// Maven
join(os.homedir(), ".m2", "repository"),
// Gradle
join(os.homedir(), ".gradle", "caches"),
// CodeQL Java build-mode: none
getJavaTempDependencyDir(),
],
hash: [
// Maven
"**/pom.xml",
// Gradle
@@ -150,17 +63,23 @@ const defaultCacheConfigs: { [language: string]: CacheConfig } = {
"buildSrc/**/Dependencies.kt",
"gradle/*.versions.toml",
"**/versions.properties",
]),
},
csharp: {
getDependencyPaths: () => [join(os.homedir(), ".nuget", "packages")],
getHashPatterns: getCsharpHashPatterns,
},
go: {
getDependencyPaths: () => [join(os.homedir(), "go", "pkg", "mod")],
getHashPatterns: async () => makePatternCheck(["**/go.sum"]),
},
};
],
},
csharp: {
paths: [join(os.homedir(), ".nuget", "packages")],
hash: [
// NuGet
"**/packages.lock.json",
// Paket
"**/paket.lock",
],
},
go: {
paths: [join(os.homedir(), "go", "pkg", "mod")],
hash: ["**/go.sum"],
},
};
}
async function makeGlobber(patterns: string[]): Promise<glob.Globber> {
return glob.create(patterns.join("\n"));
@@ -188,55 +107,23 @@ export interface DependencyCacheRestoreStatus {
/** An array of `DependencyCacheRestoreStatus` objects for each analysed language with a caching configuration. */
export type DependencyCacheRestoreStatusReport = DependencyCacheRestoreStatus[];
/**
* A wrapper around `cacheConfig.getHashPatterns` which logs when there are no files to calculate
* a hash for the cache key from.
*
* @param codeql The CodeQL instance to use.
* @param features Information about which FFs are enabled.
* @param language The language the `CacheConfig` is for. For use in the log message.
* @param cacheConfig The caching configuration to call `getHashPatterns` on.
* @param logger The logger to write the log message to if there is an error.
* @returns An array of glob patterns to use for hashing files, or `undefined` if there are no matching files.
*/
export async function checkHashPatterns(
codeql: CodeQL,
features: FeatureEnablement,
language: Language,
cacheConfig: CacheConfig,
logger: Logger,
): Promise<string[] | undefined> {
const patterns = await cacheConfig.getHashPatterns(codeql, features);
if (patterns === undefined) {
logger.info(
`Skipping download of dependency cache for ${language} as we cannot calculate a hash for the cache key.`,
);
}
return patterns;
}
/**
* Attempts to restore dependency caches for the languages being analyzed.
*
* @param codeql The CodeQL instance to use.
* @param features Information about which FFs are enabled.
* @param languages The languages being analyzed.
* @param logger A logger to record some informational messages to.
*
* @param minimizeJavaJars Whether the Java extractor should rewrite downloaded JARs to minimize their size.
* @returns An array of `DependencyCacheRestoreStatus` objects for each analysed language with a caching configuration.
*/
export async function downloadDependencyCaches(
codeql: CodeQL,
features: Features,
languages: Language[],
logger: Logger,
minimizeJavaJars: boolean,
): Promise<DependencyCacheRestoreStatusReport> {
const status: DependencyCacheRestoreStatusReport = [];
for (const language of languages) {
const cacheConfig = defaultCacheConfigs[language];
const cacheConfig = getDefaultCacheConfig()[language];
if (cacheConfig === undefined) {
logger.info(
@@ -247,21 +134,19 @@ export async function downloadDependencyCaches(
// Check that we can find files to calculate the hash for the cache key from, so we don't end up
// with an empty string.
const patterns = await checkHashPatterns(
codeql,
features,
language,
cacheConfig,
logger,
);
if (patterns === undefined) {
const globber = await makeGlobber(cacheConfig.hash);
if ((await globber.glob()).length === 0) {
status.push({ language, hit_kind: CacheHitKind.NoHash });
logger.info(
`Skipping download of dependency cache for ${language} as we cannot calculate a hash for the cache key.`,
);
continue;
}
const primaryKey = await cacheKey(codeql, features, language, patterns);
const primaryKey = await cacheKey(language, cacheConfig, minimizeJavaJars);
const restoreKeys: string[] = [
await cachePrefix(codeql, features, language),
await cachePrefix(language, minimizeJavaJars),
];
logger.info(
@@ -272,7 +157,7 @@ export async function downloadDependencyCaches(
const start = performance.now();
const hitKey = await actionsCache.restoreCache(
cacheConfig.getDependencyPaths(),
cacheConfig.paths,
primaryKey,
restoreKeys,
);
@@ -318,22 +203,20 @@ export type DependencyCacheUploadStatusReport = DependencyCacheUploadStatus[];
/**
* Attempts to store caches for the languages that were analyzed.
*
* @param codeql The CodeQL instance to use.
* @param features Information about which FFs are enabled.
* @param config The configuration for this workflow.
* @param logger A logger to record some informational messages to.
* @param minimizeJavaJars Whether the Java extractor should rewrite downloaded JARs to minimize their size.
*
* @returns An array of `DependencyCacheUploadStatus` objects for each analysed language with a caching configuration.
*/
export async function uploadDependencyCaches(
codeql: CodeQL,
features: Features,
config: Config,
logger: Logger,
minimizeJavaJars: boolean,
): Promise<DependencyCacheUploadStatusReport> {
const status: DependencyCacheUploadStatusReport = [];
for (const language of config.languages) {
const cacheConfig = defaultCacheConfigs[language];
const cacheConfig = getDefaultCacheConfig()[language];
if (cacheConfig === undefined) {
logger.info(
@@ -344,14 +227,13 @@ export async function uploadDependencyCaches(
// Check that we can find files to calculate the hash for the cache key from, so we don't end up
// with an empty string.
const patterns = await checkHashPatterns(
codeql,
features,
language,
cacheConfig,
logger,
);
if (patterns === undefined) {
const globber = await makeGlobber(cacheConfig.hash);
if ((await globber.glob()).length === 0) {
status.push({ language, result: CacheStoreResult.NoHash });
logger.info(
`Skipping upload of dependency cache for ${language} as we cannot calculate a hash for the cache key.`,
);
continue;
}
@@ -365,11 +247,7 @@ export async function uploadDependencyCaches(
// use the cache quota that we compete with. In that case, we do not wish to use up all of the quota
// with the dependency caches. For this, we could use the Cache API to check whether other workflows
// are using the quota and how full it is.
const size = await getTotalCacheSize(
cacheConfig.getDependencyPaths(),
logger,
true,
);
const size = await getTotalCacheSize(cacheConfig.paths, logger, true);
// Skip uploading an empty cache.
if (size === 0) {
@@ -380,7 +258,7 @@ export async function uploadDependencyCaches(
continue;
}
const key = await cacheKey(codeql, features, language, patterns);
const key = await cacheKey(language, cacheConfig, minimizeJavaJars);
logger.info(
`Uploading cache of size ${size} for ${language} with key ${key}...`,
@@ -388,7 +266,7 @@ export async function uploadDependencyCaches(
try {
const start = performance.now();
await actionsCache.saveCache(cacheConfig.getDependencyPaths(), key);
await actionsCache.saveCache(cacheConfig.paths, key);
const upload_duration_ms = Math.round(performance.now() - start);
status.push({
@@ -421,85 +299,31 @@ export async function uploadDependencyCaches(
/**
* Computes a cache key for the specified language.
*
* @param codeql The CodeQL instance to use.
* @param features Information about which FFs are enabled.
* @param language The language being analyzed.
* @param cacheConfig The cache configuration for the language.
* @param minimizeJavaJars Whether the Java extractor should rewrite downloaded JARs to minimize their size.
* @returns A cache key capturing information about the project(s) being analyzed in the specified language.
*/
async function cacheKey(
codeql: CodeQL,
features: Features,
language: Language,
patterns: string[],
cacheConfig: CacheConfig,
minimizeJavaJars: boolean = false,
): Promise<string> {
const hash = await glob.hashFiles(patterns.join("\n"));
return `${await cachePrefix(codeql, features, language)}${hash}`;
}
/**
* If experimental features which the cache contents depend on are enabled for the current language,
* this function returns a prefix that uniquely identifies the set of enabled features. The purpose of
* this is to avoid restoring caches whose contents depended on experimental features, if those
* experimental features are later disabled.
*
* @param codeql The CodeQL instance.
* @param features Information about enabled features.
* @param language The language we are creating the key for.
*
* @returns A cache key prefix identifying the enabled, experimental features that the cache depends on.
*/
export async function getFeaturePrefix(
codeql: CodeQL,
features: FeatureEnablement,
language: Language,
): Promise<string> {
const enabledFeatures: Feature[] = [];
const addFeatureIfEnabled = async (feature: Feature) => {
if (await features.getValue(feature, codeql)) {
enabledFeatures.push(feature);
}
};
if (language === KnownLanguage.java) {
// To ensure a safe rollout of JAR minimization, we change the key when the feature is enabled.
const minimizeJavaJars = await features.getValue(
Feature.JavaMinimizeDependencyJars,
codeql,
);
// To maintain backwards compatibility with this, we return "minify-" instead of a hash.
if (minimizeJavaJars) {
return "minify-";
}
} else if (language === KnownLanguage.csharp) {
await addFeatureIfEnabled(Feature.CsharpNewCacheKey);
}
// If any features that affect the cache are enabled, return a feature prefix by
// computing a hash of the feature array.
if (enabledFeatures.length > 0) {
return `${createCacheKeyHash(enabledFeatures)}-`;
}
// No feature prefix.
return "";
const hash = await glob.hashFiles(cacheConfig.hash.join("\n"));
return `${await cachePrefix(language, minimizeJavaJars)}${hash}`;
}
/**
* Constructs a prefix for the cache key, comprised of a CodeQL-specific prefix, a version number that
* can be changed to invalidate old caches, the runner's operating system, and the specified language name.
*
* @param codeql The CodeQL instance to use.
* @param features Information about which FFs are enabled.
* @param language The language being analyzed.
* @param minimizeJavaJars Whether the Java extractor should rewrite downloaded JARs to minimize their size.
* @returns The prefix that identifies what a cache is for.
*/
async function cachePrefix(
codeql: CodeQL,
features: Features,
language: Language,
minimizeJavaJars: boolean,
): Promise<string> {
const runnerOs = getRequiredEnvParam("RUNNER_OS");
const customPrefix = process.env[EnvVar.DEPENDENCY_CACHING_PREFIX];
@@ -509,12 +333,12 @@ async function cachePrefix(
prefix = `${prefix}-${customPrefix}`;
}
// Calculate the feature prefix for the cache, if any. This is a hash that identifies
// experimental features that affect the cache contents.
const featurePrefix = await getFeaturePrefix(codeql, features, language);
// To ensure a safe rollout of JAR minimization, we change the key when the feature is enabled.
if (language === KnownLanguage.java && minimizeJavaJars) {
prefix = `minify-${prefix}`;
}
// Assemble the cache key.
return `${featurePrefix}${prefix}-${CODEQL_DEPENDENCY_CACHE_VERSION}-${runnerOs}-${language}-`;
return `${prefix}-${CODEQL_DEPENDENCY_CACHE_VERSION}-${runnerOs}-${language}-`;
}
/** Represents information about our overall cache usage for CodeQL dependency caches. */
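Both sides of this hunk assemble the dependency cache key from a prefix, a cache version, the runner OS, the analyzed language, and a hash of the matched dependency files. A condensed sketch of the shape restored here, with the version constant's value assumed and the custom-prefix environment variable omitted:

```typescript
// Sketch of the restored key shape: an optional "minify-" marker for Java,
// then the base prefix, cache version, runner OS, language, and a file hash.
const CODEQL_DEPENDENCY_CACHE_VERSION = 1; // assumed value, for illustration

function buildCacheKey(
  language: string,
  runnerOs: string,
  fileHash: string,
  minimizeJavaJars: boolean,
): string {
  let prefix = "codeql-dependencies";
  if (language === "java" && minimizeJavaJars) {
    prefix = `minify-${prefix}`;
  }
  return `${prefix}-${CODEQL_DEPENDENCY_CACHE_VERSION}-${runnerOs}-${language}-${fileHash}`;
}

// e.g. "minify-codeql-dependencies-1-Linux-java-<hash>"
console.log(buildCacheKey("java", "Linux", "abc123", true));
```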

View File

@@ -137,10 +137,4 @@ export enum EnvVar {
* This setting is more specific than `CODEQL_ACTION_TEST_MODE`, which implies this option.
*/
SKIP_SARIF_UPLOAD = "CODEQL_ACTION_SKIP_SARIF_UPLOAD",
/**
* Whether to skip workflow validation. Intended for internal use, where we know that
* the workflow is valid and validation is not necessary.
*/
SKIP_WORKFLOW_VALIDATION = "CODEQL_ACTION_SKIP_WORKFLOW_VALIDATION",
}

View File

@@ -47,7 +47,6 @@ export enum Feature {
AnalyzeUseNewUpload = "analyze_use_new_upload",
CleanupTrapCaches = "cleanup_trap_caches",
CppDependencyInstallation = "cpp_dependency_installation_enabled",
CsharpNewCacheKey = "csharp_new_cache_key",
DiffInformedQueries = "diff_informed_queries",
DisableCsharpBuildless = "disable_csharp_buildless",
DisableJavaBuildlessEnabled = "disable_java_buildless_enabled",
@@ -77,6 +76,7 @@ export enum Feature {
OverlayAnalysisSwift = "overlay_analysis_swift",
PythonDefaultIsToNotExtractStdlib = "python_default_is_to_not_extract_stdlib",
QaTelemetryEnabled = "qa_telemetry_enabled",
ResolveSupportedLanguagesUsingCli = "resolve_supported_languages_using_cli",
UseRepositoryProperties = "use_repository_properties",
ValidateDbConfig = "validate_db_config",
}
@@ -133,11 +133,6 @@ export const featureConfig: Record<
legacyApi: true,
minimumVersion: "2.15.0",
},
[Feature.CsharpNewCacheKey]: {
defaultValue: false,
envVar: "CODEQL_ACTION_CSHARP_NEW_CACHE_KEY",
minimumVersion: undefined,
},
[Feature.DiffInformedQueries]: {
defaultValue: true,
envVar: "CODEQL_ACTION_DIFF_INFORMED_QUERIES",
@@ -166,6 +161,12 @@ export const featureConfig: Record<
legacyApi: true,
minimumVersion: undefined,
},
[Feature.ResolveSupportedLanguagesUsingCli]: {
defaultValue: false,
envVar: "CODEQL_ACTION_RESOLVE_SUPPORTED_LANGUAGES_USING_CLI",
minimumVersion: undefined,
toolsFeature: ToolsFeature.BuiltinExtractorsSpecifyDefaultQueries,
},
[Feature.OverlayAnalysis]: {
defaultValue: false,
envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS",

View File

@@ -86,7 +86,7 @@ import {
getErrorMessage,
BuildMode,
} from "./util";
import { checkWorkflow } from "./workflow";
import { validateWorkflow } from "./workflow";
/**
* Sends a status report indicating that the `init` Action is starting.
@@ -288,9 +288,16 @@ async function run() {
toolsSource = initCodeQLResult.toolsSource;
zstdAvailability = initCodeQLResult.zstdAvailability;
// Check the workflow for problems. If there are any problems, they are reported
// to the workflow log. No exceptions are thrown.
await checkWorkflow(logger, codeql);
core.startGroup("Validating workflow");
const validateWorkflowResult = await validateWorkflow(codeql, logger);
if (validateWorkflowResult === undefined) {
logger.info("Detected no issues with the code scanning workflow.");
} else {
logger.warning(
`Unable to validate code scanning workflow: ${validateWorkflowResult}`,
);
}
core.endGroup();
// Set CODEQL_ENABLE_EXPERIMENTAL_FEATURES for Rust if between 2.19.3 (included) and 2.22.1 (excluded)
// We need to set this environment variable before initializing the config, otherwise Rust
@@ -578,12 +585,15 @@ async function run() {
}
// Restore dependency cache(s), if they exist.
const minimizeJavaJars = await features.getValue(
Feature.JavaMinimizeDependencyJars,
codeql,
);
if (shouldRestoreCache(config.dependencyCachingEnabled)) {
dependencyCachingResults = await downloadDependencyCaches(
codeql,
features,
config.languages,
logger,
minimizeJavaJars,
);
}
@@ -645,7 +655,7 @@ async function run() {
`${EnvVar.JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS} is already set to '${process.env[EnvVar.JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS]}', so the Action will not override it.`,
);
} else if (
(await features.getValue(Feature.JavaMinimizeDependencyJars, codeql)) &&
minimizeJavaJars &&
config.dependencyCachingEnabled &&
config.buildMode === BuildMode.None &&
config.languages.includes(KnownLanguage.java)

View File

@@ -13,15 +13,7 @@ export interface Logger {
}
export function getActionsLogger(): Logger {
return {
debug: core.debug,
info: core.info,
warning: core.warning,
error: core.error,
isDebug: core.isDebug,
startGroup: core.startGroup,
endGroup: core.endGroup,
};
return core;
}
export function getRunnerLogger(debugMode: boolean): Logger {

View File

@@ -1,3 +1,4 @@
import * as crypto from "crypto";
import * as fs from "fs";
import * as path from "path";
@@ -10,7 +11,6 @@ import {
getWorkflowRunID,
} from "./actions-util";
import { getAutomationID } from "./api-client";
import { createCacheKeyHash } from "./caching-utils";
import { type CodeQL } from "./codeql";
import { type Config } from "./config-utils";
import { getCommitOid, getFileOidsUnderPath } from "./git-utils";
@@ -514,3 +514,27 @@ export async function getCacheRestoreKeyPrefix(
// easier to debug and understand the cache key structure.
return `${CACHE_PREFIX}-${CACHE_VERSION}-${componentsHash}-${languages}-${codeQlVersion}-`;
}
/**
* Creates a SHA-256 hash of the cache key components to ensure uniqueness
* while keeping the cache key length manageable.
*
* @param components Object containing all components that should influence cache key uniqueness
* @returns A short SHA-256 hash (first 16 characters) of the components
*/
function createCacheKeyHash(components: Record<string, any>): string {
// From https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify
//
// "Properties are visited using the same algorithm as Object.keys(), which
// has a well-defined order and is stable across implementations. For example,
// JSON.stringify on the same object will always produce the same string, and
// JSON.parse(JSON.stringify(obj)) would produce an object with the same key
// ordering as the original (assuming the object is completely
// JSON-serializable)."
const componentsJson = JSON.stringify(components);
return crypto
.createHash("sha256")
.update(componentsJson)
.digest("hex")
.substring(0, 16);
}
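A small, self-contained illustration of the hashing described above; the function body repeats the logic of createCacheKeyHash, and the input object is arbitrary.

import * as crypto from "crypto";

function exampleCacheKeyHash(components: Record<string, unknown>): string {
  // Same recipe as createCacheKeyHash: stable JSON serialization, SHA-256,
  // then truncate to 16 hex characters.
  return crypto
    .createHash("sha256")
    .update(JSON.stringify(components))
    .digest("hex")
    .substring(0, 16);
}

exampleCacheKeyHash({ features: ["csharp_new_cache_key"] });
// Always yields the same 16 characters for this input, because JSON.stringify
// visits keys in a well-defined, stable order.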

View File

@@ -9,7 +9,7 @@ import * as semver from "semver";
import { CommandInvocationError } from "./actions-util";
import { Logger } from "./logging";
import { assertNever, cleanUpPath, isBinaryAccessible } from "./util";
import { assertNever, cleanUpGlob, isBinaryAccessible } from "./util";
const MIN_REQUIRED_BSD_TAR_VERSION = "3.4.3";
const MIN_REQUIRED_GNU_TAR_VERSION = "1.31";
@@ -217,7 +217,7 @@ export async function extractTarZst(
});
});
} catch (e) {
await cleanUpPath(dest, "extraction destination directory", logger);
await cleanUpGlob(dest, "extraction destination directory", logger);
throw e;
}
}

View File

@@ -12,7 +12,7 @@ import * as semver from "semver";
import { formatDuration, Logger } from "./logging";
import * as tar from "./tar";
import { cleanUpPath, getErrorMessage, getRequiredEnvParam } from "./util";
import { cleanUpGlob, getErrorMessage, getRequiredEnvParam } from "./util";
/**
* High watermark to use when streaming the download and extraction of the CodeQL tools.
@@ -130,7 +130,7 @@ export async function downloadAndExtract(
// If we failed during processing, we want to clean up the destination directory
// before we try again.
await cleanUpPath(dest, "CodeQL bundle", logger);
await cleanUpGlob(dest, "CodeQL bundle", logger);
}
const toolsDownloadStart = performance.now();
@@ -167,7 +167,7 @@ export async function downloadAndExtract(
)}).`,
);
} finally {
await cleanUpPath(archivedBundlePath, "CodeQL bundle archive", logger);
await cleanUpGlob(archivedBundlePath, "CodeQL bundle archive", logger);
}
return {

View File

@@ -101,6 +101,16 @@ test("getMemoryFlag() throws if the ram input is < 0 or NaN", async (t) => {
}
});
test("getAddSnippetsFlag() should return the correct flag", (t) => {
t.deepEqual(util.getAddSnippetsFlag(true), "--sarif-add-snippets");
t.deepEqual(util.getAddSnippetsFlag("true"), "--sarif-add-snippets");
t.deepEqual(util.getAddSnippetsFlag(false), "--no-sarif-add-snippets");
t.deepEqual(util.getAddSnippetsFlag(undefined), "--no-sarif-add-snippets");
t.deepEqual(util.getAddSnippetsFlag("false"), "--no-sarif-add-snippets");
t.deepEqual(util.getAddSnippetsFlag("foo bar"), "--no-sarif-add-snippets");
});
test("getThreadsFlag() should return the correct --threads flag", (t) => {
const numCpus = os.cpus().length;
@@ -524,12 +534,3 @@ test("getCgroupCpuCountFromCpus returns undefined if the CPU file exists but is
);
});
});
test("checkDiskUsage succeeds and produces positive numbers", async (t) => {
process.env["GITHUB_WORKSPACE"] = os.tmpdir();
const diskUsage = await util.checkDiskUsage(getRunnerLogger(true));
if (t.truthy(diskUsage)) {
t.true(diskUsage.numAvailableBytes > 0);
t.true(diskUsage.numTotalBytes > 0);
}
});

View File

@@ -6,6 +6,7 @@ import * as path from "path";
import * as core from "@actions/core";
import * as exec from "@actions/exec/lib/exec";
import * as io from "@actions/io";
import * as del from "del";
import getFolderSize from "get-folder-size";
import * as yaml from "js-yaml";
import * as semver from "semver";
@@ -166,7 +167,7 @@ export async function withTmpDir<T>(
): Promise<T> {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "codeql-action-"));
const result = await body(tmpDir);
await fs.promises.rm(tmpDir, { force: true, recursive: true });
await del.deleteAsync(tmpDir, { force: true });
return result;
}
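A usage sketch for withTmpDir as defined above, assuming the fs and path imports already present in this file; the file name is hypothetical.

async function exampleWithTmpDir(): Promise<string> {
  return withTmpDir(async (tmpDir) => {
    const file = path.join(tmpDir, "example.txt");
    await fs.promises.writeFile(file, "hello");
    return fs.promises.readFile(file, "utf8");
  });
}
// Resolves to "hello"; the temporary directory is removed after the body completes.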
@@ -342,6 +343,21 @@ export function getMemoryFlag(
return `--ram=${megabytes}`;
}
/**
 * Get the codeql flag to specify whether to add code snippets to the SARIF file.
 *
 * @returns `--sarif-add-snippets` if snippets should be added, otherwise `--no-sarif-add-snippets`.
*/
export function getAddSnippetsFlag(
userInput: string | boolean | undefined,
): string {
if (typeof userInput === "string") {
// Handle strings explicitly, since any non-empty string is truthy.
userInput = userInput.toLowerCase() === "true";
}
return userInput ? "--sarif-add-snippets" : "--no-sarif-add-snippets";
}
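For reference, the behaviour the string check above buys, matching the util.test.ts cases shown earlier:

getAddSnippetsFlag(true);      // "--sarif-add-snippets"
getAddSnippetsFlag("true");    // "--sarif-add-snippets"
getAddSnippetsFlag("false");   // "--no-sarif-add-snippets" (a non-empty string would otherwise be truthy)
getAddSnippetsFlag(undefined); // "--no-sarif-add-snippets"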
/**
* Get the value of the codeql `--threads` flag specified for the `threads`
* input. If no value was specified, all available threads will be used.
@@ -740,7 +756,7 @@ export async function bundleDb(
// from somewhere else or someone trying to make the action upload a
// non-database file.
if (fs.existsSync(databaseBundlePath)) {
await fs.promises.rm(databaseBundlePath, { force: true });
await del.deleteAsync(databaseBundlePath, { force: true });
}
await codeql.databaseBundle(databasePath, databaseBundlePath, dbName);
return databaseBundlePath;
@@ -1240,13 +1256,19 @@ export async function checkSipEnablement(
}
}
export async function cleanUpPath(file: string, name: string, logger: Logger) {
export async function cleanUpGlob(glob: string, name: string, logger: Logger) {
logger.debug(`Cleaning up ${name}.`);
try {
await fs.promises.rm(file, {
force: true,
recursive: true,
});
const deletedPaths = await del.deleteAsync(glob, { force: true });
if (deletedPaths.length === 0) {
logger.warning(
`Failed to clean up ${name}: no files found matching ${glob}.`,
);
} else if (deletedPaths.length === 1) {
logger.debug(`Cleaned up ${name}.`);
} else {
logger.debug(`Cleaned up ${name} (${deletedPaths.length} files).`);
}
} catch (e) {
logger.warning(`Failed to clean up ${name}: ${e}.`);
}
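A sketch of calling cleanUpGlob with a hypothetical glob, summarising the logging behaviour implemented above:

async function exampleCleanUp(logger: Logger) {
  // Warns if nothing matches the glob, debug-logs the number of deleted paths
  // otherwise, and logs a warning instead of throwing if deletion fails.
  await cleanUpGlob(
    `${process.env.RUNNER_TEMP}/codeql-bundle*`, // hypothetical glob
    "CodeQL bundle archive",
    logger,
  );
}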

View File

@@ -2,17 +2,9 @@ import test, { ExecutionContext } from "ava";
import * as yaml from "js-yaml";
import * as sinon from "sinon";
import * as actionsUtil from "./actions-util";
import { createStubCodeQL, getCodeQLForTesting } from "./codeql";
import { EnvVar } from "./environment";
import { getCodeQLForTesting } from "./codeql";
import { setupTests } from "./testing-utils";
import {
checkExpectedLogMessages,
getRecordingLogger,
LoggedMessage,
setupTests,
} from "./testing-utils";
import {
checkWorkflow,
CodedError,
formatWorkflowCause,
formatWorkflowErrors,
@@ -21,7 +13,6 @@ import {
Workflow,
WorkflowErrors,
} from "./workflow";
import * as workflow from "./workflow";
function errorCodes(
actual: CodedError[],
@@ -879,78 +870,3 @@ test("getCategoryInputOrThrow throws error for workflow with multiple calls to a
},
);
});
test("checkWorkflow - validates workflow if `SKIP_WORKFLOW_VALIDATION` is not set", async (t) => {
const messages: LoggedMessage[] = [];
const codeql = createStubCodeQL({});
sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false);
const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow");
validateWorkflow.resolves(undefined);
await checkWorkflow(getRecordingLogger(messages), codeql);
t.assert(
validateWorkflow.calledOnce,
"`checkWorkflow` unexpectedly did not call `validateWorkflow`",
);
checkExpectedLogMessages(t, messages, [
"Detected no issues with the code scanning workflow.",
]);
});
test("checkWorkflow - logs problems with workflow validation", async (t) => {
const messages: LoggedMessage[] = [];
const codeql = createStubCodeQL({});
sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false);
const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow");
validateWorkflow.resolves("problem");
await checkWorkflow(getRecordingLogger(messages), codeql);
t.assert(
validateWorkflow.calledOnce,
"`checkWorkflow` unexpectedly did not call `validateWorkflow`",
);
checkExpectedLogMessages(t, messages, [
"Unable to validate code scanning workflow: problem",
]);
});
test("checkWorkflow - skips validation if `SKIP_WORKFLOW_VALIDATION` is `true`", async (t) => {
process.env[EnvVar.SKIP_WORKFLOW_VALIDATION] = "true";
const messages: LoggedMessage[] = [];
const codeql = createStubCodeQL({});
sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false);
const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow");
await checkWorkflow(getRecordingLogger(messages), codeql);
t.assert(
validateWorkflow.notCalled,
"`checkWorkflow` called `validateWorkflow` unexpectedly",
);
t.is(messages.length, 0);
});
test("checkWorkflow - skips validation for `dynamic` workflows", async (t) => {
const messages: LoggedMessage[] = [];
const codeql = createStubCodeQL({});
const isDynamicWorkflow = sinon
.stub(actionsUtil, "isDynamicWorkflow")
.returns(true);
const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow");
await checkWorkflow(getRecordingLogger(messages), codeql);
t.assert(isDynamicWorkflow.calledOnce);
t.assert(
validateWorkflow.notCalled,
"`checkWorkflow` called `validateWorkflow` unexpectedly",
);
t.is(messages.length, 0);
});

View File

@@ -5,10 +5,8 @@ import zlib from "zlib";
import * as core from "@actions/core";
import * as yaml from "js-yaml";
import { isDynamicWorkflow } from "./actions-util";
import * as api from "./api-client";
import { CodeQL } from "./codeql";
import { EnvVar } from "./environment";
import { Logger } from "./logging";
import {
getRequiredEnvParam,
@@ -218,7 +216,7 @@ function hasWorkflowTrigger(triggerName: string, doc: Workflow): boolean {
return Object.prototype.hasOwnProperty.call(doc.on, triggerName);
}
async function validateWorkflow(
export async function validateWorkflow(
codeql: CodeQL,
logger: Logger,
): Promise<undefined | string> {
@@ -464,36 +462,3 @@ export function getCheckoutPathInputOrThrow(
) || getRequiredEnvParam("GITHUB_WORKSPACE") // if unspecified, checkout_path defaults to ${{ github.workspace }}
);
}
/**
 * A wrapper around `validateWorkflow` that reports the outcome.
*
* @param logger The logger to use.
* @param codeql The CodeQL instance.
*/
export async function checkWorkflow(logger: Logger, codeql: CodeQL) {
// Check the workflow for problems, unless `SKIP_WORKFLOW_VALIDATION` is `true`
// or the workflow trigger is `dynamic`.
if (
!isDynamicWorkflow() &&
process.env[EnvVar.SKIP_WORKFLOW_VALIDATION] !== "true"
) {
core.startGroup("Validating workflow");
const validateWorkflowResult = await internal.validateWorkflow(
codeql,
logger,
);
if (validateWorkflowResult === undefined) {
logger.info("Detected no issues with the code scanning workflow.");
} else {
logger.debug(
`Unable to validate code scanning workflow: ${validateWorkflowResult}`,
);
}
core.endGroup();
}
}
export const internal = {
validateWorkflow,
};