Compare commits


3 Commits

Author        SHA1        Message                             Date
Henry Mercer  4d8b358273  Disable SIP disablement check       2025-10-28 12:27:34 +00:00
Henry Mercer  f336c09493  Add testing trigger                 2025-10-28 12:26:18 +00:00
Henry Mercer  239e305d18  Check disk usage using Node.js API  2025-10-27 18:34:23 +00:00
                          This was introduced in Node.js 18
16 changed files with 589 additions and 1969 deletions
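
Commit 239e305d18 replaces the bundled check-disk-space package with the fs.promises.statfs API that ships with Node.js 18 (the diffs below drop the dependency and rework checkDiskUsage around it). The following is a minimal standalone sketch of the same approach; the checkAvailableDiskSpace name and DiskUsage shape are illustrative stand-ins rather than the action's actual exports, and the 2 GB warning threshold mirrors the hunks further down.

```typescript
// Minimal sketch of a statfs-based disk space check, assuming a Node.js 18+
// runtime where fs.promises.statfs is available. Names here are illustrative.
import { statfs } from "fs/promises";

interface DiskUsage {
  numAvailableBytes: number;
  numTotalBytes: number;
}

const MB_IN_BYTES = 1024 * 1024;
const GB_IN_BYTES = 1024 * 1024 * 1024;

async function checkAvailableDiskSpace(
  directory: string,
): Promise<DiskUsage | undefined> {
  try {
    // statfs reports sizes in filesystem blocks, so convert via the block size.
    const stats = await statfs(directory);
    const numAvailableBytes = stats.bavail * stats.bsize; // blocks available to unprivileged users
    const numTotalBytes = stats.blocks * stats.bsize; // total data blocks on the filesystem

    if (numAvailableBytes < 2 * GB_IN_BYTES) {
      console.warn(
        `The runner is running low on disk space ` +
          `(${(numAvailableBytes / MB_IN_BYTES).toPrecision(4)} MB available).`,
      );
    }
    return { numAvailableBytes, numTotalBytes };
  } catch (error) {
    // Disk usage reporting is best-effort: warn and carry on, as the diff does.
    console.warn(`Failed to check available disk space: ${error}`);
    return undefined;
  }
}

// Example: check the workspace directory the runner exposes.
void checkAvailableDiskSpace(process.env.GITHUB_WORKSPACE ?? process.cwd()).then(
  (usage) => console.log(usage),
);
```

The hunks in lib/analyze-action.js and lib/autobuild-action.js compare block counts directly (diskUsage.bavail < 2 * numBlocksPerGb), which is equivalent to the byte comparison in the sketch once both sides are multiplied by the block size.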

View File

@@ -9,9 +9,6 @@ env:
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v*
pull_request:
types:
- opened
@@ -56,42 +53,8 @@ jobs:
fail-fast: false
matrix:
include:
- os: macos-latest
version: stable-v2.17.6
- os: ubuntu-latest
version: stable-v2.17.6
- os: macos-latest
version: stable-v2.18.4
- os: ubuntu-latest
version: stable-v2.18.4
- os: macos-latest
version: stable-v2.19.4
- os: ubuntu-latest
version: stable-v2.19.4
- os: macos-latest
version: stable-v2.20.7
- os: ubuntu-latest
version: stable-v2.20.7
- os: macos-latest
version: stable-v2.21.4
- os: ubuntu-latest
version: stable-v2.21.4
- os: macos-latest
version: stable-v2.22.4
- os: ubuntu-latest
version: stable-v2.22.4
- os: macos-latest
version: default
- os: ubuntu-latest
version: default
- os: macos-latest
version: linked
- os: ubuntu-latest
version: linked
- os: macos-latest
version: nightly-latest
- os: ubuntu-latest
version: nightly-latest
name: Multi-language repository
if: github.triggering_actor != 'dependabot[bot]'
permissions:
@@ -185,6 +148,3 @@ jobs:
echo "Did not create a database for Swift, or created it in the wrong location."
exit 1
fi
env:
CODEQL_ACTION_RESOLVE_SUPPORTED_LANGUAGES_USING_CLI: true
CODEQL_ACTION_TEST_MODE: true

View File

@@ -26498,7 +26498,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",

lib/analyze-action.js generated
View File

@@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({
var require_mock_client = __commonJS({
"node_modules/undici/lib/mock/mock-client.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Client = require_client();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({
var require_mock_pool = __commonJS({
"node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Pool = require_pool();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -18148,7 +18148,7 @@ var require_summary = __commonJS({
exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
var os_1 = require("os");
var fs_1 = require("fs");
var { access: access2, appendFile, writeFile } = fs_1.promises;
var { access, appendFile, writeFile } = fs_1.promises;
exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
var Summary = class {
@@ -18171,7 +18171,7 @@ var require_summary = __commonJS({
throw new Error(`Unable to find environment variable for $${exports2.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access2(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
} catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
@@ -28569,13 +28569,13 @@ var require_reusify = __commonJS({
current.next = null;
return current;
}
function release3(obj) {
function release2(obj) {
tail.next = obj;
tail = obj;
}
return {
get,
release: release3
release: release2
};
}
module2.exports = reusify;
@@ -28644,7 +28644,7 @@ var require_queue = __commonJS({
self2.paused = false;
for (var i = 0; i < self2.concurrency; i++) {
_running++;
release3();
release2();
}
}
function idle() {
@@ -28653,7 +28653,7 @@ var require_queue = __commonJS({
function push(value, done) {
var current = cache.get();
current.context = context2;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -28673,7 +28673,7 @@ var require_queue = __commonJS({
function unshift(value, done) {
var current = cache.get();
current.context = context2;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -28690,7 +28690,7 @@ var require_queue = __commonJS({
worker.call(context2, current.value, current.worked);
}
}
function release3(holder) {
function release2(holder) {
if (holder) {
cache.release(holder);
}
@@ -30711,8 +30711,8 @@ var require_semver = __commonJS({
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release3, identifier, identifierBase) {
if (release3.startsWith("pre")) {
inc(release2, identifier, identifierBase) {
if (release2.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
@@ -30723,7 +30723,7 @@ var require_semver = __commonJS({
}
}
}
switch (release3) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -30814,7 +30814,7 @@ var require_semver = __commonJS({
break;
}
default:
throw new Error(`invalid increment argument: ${release3}`);
throw new Error(`invalid increment argument: ${release2}`);
}
this.raw = this.format();
if (this.build.length) {
@@ -30880,7 +30880,7 @@ var require_inc = __commonJS({
"node_modules/semver/functions/inc.js"(exports2, module2) {
"use strict";
var SemVer = require_semver();
var inc = (version, release3, options, identifier, identifierBase) => {
var inc = (version, release2, options, identifier, identifierBase) => {
if (typeof options === "string") {
identifierBase = identifier;
identifier = options;
@@ -30890,7 +30890,7 @@ var require_inc = __commonJS({
return new SemVer(
version instanceof SemVer ? version.version : version,
options
).inc(release3, identifier, identifierBase).version;
).inc(release2, identifier, identifierBase).version;
} catch (er) {
return null;
}
@@ -32347,7 +32347,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -37286,8 +37285,8 @@ var require_semver3 = __commonJS({
}
} while (++i2);
};
SemVer.prototype.inc = function(release3, identifier) {
switch (release3) {
SemVer.prototype.inc = function(release2, identifier) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -37363,20 +37362,20 @@ var require_semver3 = __commonJS({
}
break;
default:
throw new Error("invalid increment argument: " + release3);
throw new Error("invalid increment argument: " + release2);
}
this.format();
this.raw = this.version;
return this;
};
exports2.inc = inc;
function inc(version, release3, loose, identifier) {
function inc(version, release2, loose, identifier) {
if (typeof loose === "string") {
identifier = loose;
loose = void 0;
}
try {
return new SemVer(version, loose).inc(release3, identifier).version;
return new SemVer(version, loose).inc(release2, identifier).version;
} catch (er) {
return null;
}
@@ -60858,7 +60857,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
};
var access2 = {
var access = {
parameterPath: ["options", "access"],
mapper: {
serializedName: "x-ms-blob-public-access",
@@ -62666,7 +62665,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
requestId,
accept1,
metadata,
access2,
access,
defaultEncryptionScope,
preventEncryptionScopeOverride
],
@@ -62813,7 +62812,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
accept,
version,
requestId,
access2,
access,
leaseId,
ifModifiedSince,
ifUnmodifiedSince
@@ -71659,7 +71658,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
* @param containerAcl - Array of elements each having a unique Id and details of the access policy.
* @param options - Options to Container Set Access Policy operation.
*/
async setAccessPolicy(access3, containerAcl2, options = {}) {
async setAccessPolicy(access2, containerAcl2, options = {}) {
options.conditions = options.conditions || {};
return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
const acl = [];
@@ -71675,7 +71674,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
return assertResponse(await this.containerContext.setAccessPolicy({
abortSignal: options.abortSignal,
access: access3,
access: access2,
containerAcl: acl,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: options.conditions,
@@ -85956,161 +85955,27 @@ var io2 = __toESM(require_io());
// src/util.ts
var fs4 = __toESM(require("fs"));
var fsPromises4 = __toESM(require("fs/promises"));
var os = __toESM(require("os"));
var path5 = __toESM(require("path"));
var core3 = __toESM(require_core());
var exec = __toESM(require_exec());
var io = __toESM(require_io());
// node_modules/check-disk-space/dist/check-disk-space.mjs
var import_node_child_process = require("node:child_process");
var import_promises = require("node:fs/promises");
var import_node_os = require("node:os");
var import_node_path = require("node:path");
var import_node_process = require("node:process");
var import_node_util = require("node:util");
var InvalidPathError = class _InvalidPathError extends Error {
constructor(message) {
super(message);
this.name = "InvalidPathError";
Object.setPrototypeOf(this, _InvalidPathError.prototype);
}
};
var NoMatchError = class _NoMatchError extends Error {
constructor(message) {
super(message);
this.name = "NoMatchError";
Object.setPrototypeOf(this, _NoMatchError.prototype);
}
};
async function isDirectoryExisting(directoryPath, dependencies) {
try {
await dependencies.fsAccess(directoryPath);
return Promise.resolve(true);
} catch (error2) {
return Promise.resolve(false);
}
}
async function getFirstExistingParentPath(directoryPath, dependencies) {
let parentDirectoryPath = directoryPath;
let parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
while (!parentDirectoryFound) {
parentDirectoryPath = dependencies.pathNormalize(parentDirectoryPath + "/..");
parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
}
return parentDirectoryPath;
}
async function hasPowerShell3(dependencies) {
const major = parseInt(dependencies.release.split(".")[0], 10);
if (major <= 6) {
return false;
}
try {
await dependencies.cpExecFile("where", ["powershell"], { windowsHide: true });
return true;
} catch (error2) {
return false;
}
}
function checkDiskSpace(directoryPath, dependencies = {
platform: import_node_process.platform,
release: (0, import_node_os.release)(),
fsAccess: import_promises.access,
pathNormalize: import_node_path.normalize,
pathSep: import_node_path.sep,
cpExecFile: (0, import_node_util.promisify)(import_node_child_process.execFile)
}) {
function mapOutput(stdout, filter, mapping, coefficient) {
const parsed = stdout.split("\n").map((line) => line.trim()).filter((line) => line.length !== 0).slice(1).map((line) => line.split(/\s+(?=[\d/])/));
const filtered = parsed.filter(filter);
if (filtered.length === 0) {
throw new NoMatchError();
}
const diskData = filtered[0];
return {
diskPath: diskData[mapping.diskPath],
free: parseInt(diskData[mapping.free], 10) * coefficient,
size: parseInt(diskData[mapping.size], 10) * coefficient
};
}
async function check(cmd, filter, mapping, coefficient = 1) {
const [file, ...args] = cmd;
if (file === void 0) {
return Promise.reject(new Error("cmd must contain at least one item"));
}
try {
const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
return mapOutput(stdout, filter, mapping, coefficient);
} catch (error2) {
return Promise.reject(error2);
}
}
async function checkWin32(directoryPath2) {
if (directoryPath2.charAt(1) !== ":") {
return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath2}`));
}
const powershellCmd = [
"powershell",
"Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size"
];
const wmicCmd = [
"wmic",
"logicaldisk",
"get",
"size,freespace,caption"
];
const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
return check(cmd, (driveData) => {
const driveLetter = driveData[0];
return directoryPath2.toUpperCase().startsWith(driveLetter.toUpperCase());
}, {
diskPath: 0,
free: 1,
size: 2
});
}
async function checkUnix(directoryPath2) {
if (!dependencies.pathNormalize(directoryPath2).startsWith(dependencies.pathSep)) {
return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath2}`));
}
const pathToCheck = await getFirstExistingParentPath(directoryPath2, dependencies);
return check(
[
"df",
"-Pk",
"--",
pathToCheck
],
() => true,
// We should only get one line, so we did not need to filter
{
diskPath: 5,
free: 3,
size: 1
},
1024
);
}
if (dependencies.platform === "win32") {
return checkWin32(directoryPath);
}
return checkUnix(directoryPath);
}
// node_modules/del/index.js
var import_promises5 = __toESM(require("node:fs/promises"), 1);
var import_node_path6 = __toESM(require("node:path"), 1);
var import_node_process5 = __toESM(require("node:process"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_process4 = __toESM(require("node:process"), 1);
// node_modules/globby/index.js
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_fs3 = __toESM(require("node:fs"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
// node_modules/globby/node_modules/@sindresorhus/merge-streams/index.js
var import_node_events = require("node:events");
var import_node_stream = require("node:stream");
var import_promises2 = require("node:stream/promises");
var import_promises = require("node:stream/promises");
function mergeStreams(streams) {
if (!Array.isArray(streams)) {
throw new TypeError(`Expected an array, got \`${typeof streams}\`.`);
@@ -86185,7 +86050,7 @@ var onMergedStreamFinished = async (passThroughStream, streams) => {
}
};
var onMergedStreamEnd = async (passThroughStream, { signal }) => {
await (0, import_promises2.finished)(passThroughStream, { signal, cleanup: true });
await (0, import_promises.finished)(passThroughStream, { signal, cleanup: true });
};
var onInputStreamsUnpipe = async (passThroughStream, streams, { signal }) => {
for await (const [unpipedStream] of (0, import_node_events.on)(passThroughStream, "unpipe", { signal })) {
@@ -86235,7 +86100,7 @@ var afterMergedStreamFinished = async (onFinished, stream2) => {
};
var onInputStreamEnd = async ({ passThroughStream, stream: stream2, streams, ended, aborted, controller: { signal } }) => {
try {
await (0, import_promises2.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
await (0, import_promises.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
if (streams.has(stream2)) {
ended.add(stream2);
}
@@ -86289,13 +86154,13 @@ var import_fast_glob2 = __toESM(require_out4(), 1);
// node_modules/path-type/index.js
var import_node_fs = __toESM(require("node:fs"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_promises2 = __toESM(require("node:fs/promises"), 1);
async function isType(fsStatType, statsMethodName, filePath) {
if (typeof filePath !== "string") {
throw new TypeError(`Expected a string, got ${typeof filePath}`);
}
try {
const stats = await import_promises3.default[fsStatType](filePath);
const stats = await import_promises2.default[fsStatType](filePath);
return stats[statsMethodName]();
} catch (error2) {
if (error2.code === "ENOENT") {
@@ -86325,20 +86190,20 @@ var isDirectorySync = isTypeSync.bind(void 0, "statSync", "isDirectory");
var isSymlinkSync = isTypeSync.bind(void 0, "lstatSync", "isSymbolicLink");
// node_modules/unicorn-magic/node.js
var import_node_util2 = require("node:util");
var import_node_child_process2 = require("node:child_process");
var import_node_util = require("node:util");
var import_node_child_process = require("node:child_process");
var import_node_url = require("node:url");
var execFileOriginal = (0, import_node_util2.promisify)(import_node_child_process2.execFile);
var execFileOriginal = (0, import_node_util.promisify)(import_node_child_process.execFile);
function toPath(urlOrPath) {
return urlOrPath instanceof URL ? (0, import_node_url.fileURLToPath)(urlOrPath) : urlOrPath;
}
var TEN_MEGABYTES_IN_BYTES = 10 * 1024 * 1024;
// node_modules/globby/ignore.js
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_process = __toESM(require("node:process"), 1);
var import_node_fs2 = __toESM(require("node:fs"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_node_path = __toESM(require("node:path"), 1);
var import_fast_glob = __toESM(require_out4(), 1);
var import_ignore = __toESM(require_ignore(), 1);
@@ -86366,16 +86231,16 @@ var ignoreFilesGlobOptions = {
dot: true
};
var GITIGNORE_FILES_PATTERN = "**/.gitignore";
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path2.default.posix.join(base, pattern.slice(1)) : import_node_path2.default.posix.join(base, pattern);
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path.default.posix.join(base, pattern.slice(1)) : import_node_path.default.posix.join(base, pattern);
var parseIgnoreFile = (file, cwd) => {
const base = slash(import_node_path2.default.relative(cwd, import_node_path2.default.dirname(file.filePath)));
const base = slash(import_node_path.default.relative(cwd, import_node_path.default.dirname(file.filePath)));
return file.content.split(/\r?\n/).filter((line) => line && !line.startsWith("#")).map((pattern) => applyBaseToPattern(pattern, base));
};
var toRelativePath = (fileOrDirectory, cwd) => {
cwd = slash(cwd);
if (import_node_path2.default.isAbsolute(fileOrDirectory)) {
if (import_node_path.default.isAbsolute(fileOrDirectory)) {
if (slash(fileOrDirectory).startsWith(cwd)) {
return import_node_path2.default.relative(cwd, fileOrDirectory);
return import_node_path.default.relative(cwd, fileOrDirectory);
}
throw new Error(`Path ${fileOrDirectory} is not in cwd ${cwd}`);
}
@@ -86391,7 +86256,7 @@ var getIsIgnoredPredicate = (files, cwd) => {
};
};
var normalizeOptions = (options = {}) => ({
cwd: toPath(options.cwd) ?? import_node_process2.default.cwd(),
cwd: toPath(options.cwd) ?? import_node_process.default.cwd(),
suppressErrors: Boolean(options.suppressErrors),
deep: typeof options.deep === "number" ? options.deep : Number.POSITIVE_INFINITY,
ignore: [...options.ignore ?? [], ...defaultIgnoredDirectories]
@@ -86408,7 +86273,7 @@ var isIgnoredByIgnoreFiles = async (patterns, options) => {
const files = await Promise.all(
paths.map(async (filePath) => ({
filePath,
content: await import_promises4.default.readFile(filePath, "utf8")
content: await import_promises3.default.readFile(filePath, "utf8")
}))
);
return getIsIgnoredPredicate(files, cwd);
@@ -86437,14 +86302,14 @@ var assertPatternsInput = (patterns) => {
};
var normalizePathForDirectoryGlob = (filePath, cwd) => {
const path20 = isNegativePattern(filePath) ? filePath.slice(1) : filePath;
return import_node_path3.default.isAbsolute(path20) ? path20 : import_node_path3.default.join(cwd, path20);
return import_node_path2.default.isAbsolute(path20) ? path20 : import_node_path2.default.join(cwd, path20);
};
var getDirectoryGlob = ({ directoryPath, files, extensions }) => {
const extensionGlob = extensions?.length > 0 ? `.${extensions.length > 1 ? `{${extensions.join(",")}}` : extensions[0]}` : "";
return files ? files.map((file) => import_node_path3.default.posix.join(directoryPath, `**/${import_node_path3.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path3.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
return files ? files.map((file) => import_node_path2.default.posix.join(directoryPath, `**/${import_node_path2.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path2.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
};
var directoryToGlob = async (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => {
@@ -86454,7 +86319,7 @@ var directoryToGlob = async (directoryPaths, {
return globs.flat();
};
var directoryToGlobSync = (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => directoryPaths.flatMap((directoryPath) => isDirectorySync(normalizePathForDirectoryGlob(directoryPath, cwd)) ? getDirectoryGlob({ directoryPath, files, extensions }) : directoryPath);
@@ -86512,7 +86377,7 @@ var getFilterSync = (options) => {
var createFilterFunction = (isIgnored) => {
const seen = /* @__PURE__ */ new Set();
return (fastGlobResult) => {
const pathKey = import_node_path3.default.normalize(fastGlobResult.path ?? fastGlobResult);
const pathKey = import_node_path2.default.normalize(fastGlobResult.path ?? fastGlobResult);
if (seen.has(pathKey) || isIgnored && isIgnored(pathKey)) {
return false;
}
@@ -86623,12 +86488,12 @@ var { convertPathToPattern } = import_fast_glob2.default;
var import_is_glob = __toESM(require_is_glob(), 1);
// node_modules/is-path-cwd/index.js
var import_node_process4 = __toESM(require("node:process"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
function isPathCwd(path_) {
let cwd = import_node_process4.default.cwd();
path_ = import_node_path4.default.resolve(path_);
if (import_node_process4.default.platform === "win32") {
let cwd = import_node_process3.default.cwd();
path_ = import_node_path3.default.resolve(path_);
if (import_node_process3.default.platform === "win32") {
cwd = cwd.toLowerCase();
path_ = path_.toLowerCase();
}
@@ -86636,11 +86501,11 @@ function isPathCwd(path_) {
}
// node_modules/del/node_modules/is-path-inside/index.js
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
function isPathInside(childPath, parentPath) {
const relation = import_node_path5.default.relative(parentPath, childPath);
const relation = import_node_path4.default.relative(parentPath, childPath);
return Boolean(
relation && relation !== ".." && !relation.startsWith(`..${import_node_path5.default.sep}`) && relation !== import_node_path5.default.resolve(childPath)
relation && relation !== ".." && !relation.startsWith(`..${import_node_path4.default.sep}`) && relation !== import_node_path4.default.resolve(childPath)
);
}
@@ -86779,14 +86644,14 @@ function safeCheck(file, cwd) {
function normalizePatterns(patterns) {
patterns = Array.isArray(patterns) ? patterns : [patterns];
patterns = patterns.map((pattern) => {
if (import_node_process5.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
if (import_node_process4.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
return slash(pattern);
}
return pattern;
});
return patterns;
}
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5.default.cwd(), onProgress = () => {
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process4.default.cwd(), onProgress = () => {
}, ...options } = {}) {
options = {
expandDirectories: false,
@@ -86807,12 +86672,12 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
let deletedCount = 0;
const mapper = async (file) => {
file = import_node_path6.default.resolve(cwd, file);
file = import_node_path5.default.resolve(cwd, file);
if (!force) {
safeCheck(file, cwd);
}
if (!dryRun) {
await import_promises5.default.rm(file, { recursive: true, force: true });
await import_promises4.default.rm(file, { recursive: true, force: true });
}
deletedCount += 1;
onProgress({
@@ -86829,7 +86694,7 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
// node_modules/get-folder-size/index.js
var import_node_path7 = require("node:path");
var import_node_path6 = require("node:path");
async function getFolderSize(itemPath, options) {
return await core(itemPath, options, { errors: true });
}
@@ -86854,7 +86719,7 @@ async function core(rootItemPath, options = {}, returnType = {}) {
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
(directoryItem) => processItem((0, import_node_path7.join)(itemPath, directoryItem))
(directoryItem) => processItem((0, import_node_path6.join)(itemPath, directoryItem))
)
);
}
@@ -89498,8 +89363,8 @@ function getToolNames(sarif) {
}
return Object.keys(toolNames);
}
function getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, platform3) {
const fixedAmount = 1024 * (platform3 === "win32" ? 1.5 : 1);
function getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, platform2) {
const fixedAmount = 1024 * (platform2 === "win32" ? 1.5 : 1);
const scaledAmount = getReservedRamScaleFactor() * Math.max(totalMemoryMegaBytes - 8 * 1024, 0);
return fixedAmount + scaledAmount;
}
@@ -89513,7 +89378,7 @@ function getReservedRamScaleFactor() {
}
return envVar / 100;
}
function getMemoryFlagValueForPlatform(userInput, totalMemoryBytes, platform3) {
function getMemoryFlagValueForPlatform(userInput, totalMemoryBytes, platform2) {
let memoryToUseMegaBytes;
if (userInput) {
memoryToUseMegaBytes = Number(userInput);
@@ -89526,7 +89391,7 @@ function getMemoryFlagValueForPlatform(userInput, totalMemoryBytes, platform3) {
const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
const reservedMemoryMegaBytes = getSystemReservedMemoryMegaBytes(
totalMemoryMegaBytes,
platform3
platform2
);
memoryToUseMegaBytes = totalMemoryMegaBytes - reservedMemoryMegaBytes;
}
@@ -89881,16 +89746,14 @@ function getErrorMessage(error2) {
}
async function checkDiskUsage(logger) {
try {
if (process.platform === "darwin" && (process.arch === "arm" || process.arch === "arm64") && !await checkSipEnablement(logger)) {
return void 0;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises4.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE")
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const message = `The Actions runner is running low on disk space (${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = 1024 * 1024 / blockSizeInBytes;
const numBlocksPerGb = 1024 * 1024 * 1024 / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message = `The Actions runner is running low on disk space (${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env["CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */] !== "true") {
logger.warning(message);
} else {
@@ -89899,8 +89762,8 @@ async function checkDiskUsage(logger) {
core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes
};
} catch (error2) {
logger.warning(
@@ -89934,34 +89797,6 @@ function satisfiesGHESVersion(ghesVersion, range, defaultIfInvalid) {
function cloneObject(obj) {
return JSON.parse(JSON.stringify(obj));
}
async function checkSipEnablement(logger) {
if (process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] !== void 0 && ["true", "false"].includes(process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */])) {
return process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] === "true";
}
try {
const sipStatusOutput = await exec.getExecOutput("csrutil status");
if (sipStatusOutput.exitCode === 0) {
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: enabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
return true;
}
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: disabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
return false;
}
}
return void 0;
} catch (e) {
logger.warning(
`Failed to determine if System Integrity Protection was enabled: ${e}`
);
return void 0;
}
}
async function cleanUpGlob(glob2, name, logger) {
logger.debug(`Cleaning up ${name}.`);
try {
@@ -90655,7 +90490,7 @@ function getCliConfigCategoryIfExists(cliError) {
}
function isUnsupportedPlatform() {
return !SUPPORTED_PLATFORMS.some(
([platform3, arch2]) => platform3 === process.platform && arch2 === process.arch
([platform2, arch2]) => platform2 === process.platform && arch2 === process.arch
);
}
function getUnsupportedPlatformError(cliError) {
@@ -92341,17 +92176,17 @@ function getCodeQLBundleExtension(compressionMethod) {
}
function getCodeQLBundleName(compressionMethod) {
const extension = getCodeQLBundleExtension(compressionMethod);
let platform3;
let platform2;
if (process.platform === "win32") {
platform3 = "win64";
platform2 = "win64";
} else if (process.platform === "linux") {
platform3 = "linux64";
platform2 = "linux64";
} else if (process.platform === "darwin") {
platform3 = "osx64";
platform2 = "osx64";
} else {
return `codeql-bundle${extension}`;
}
return `codeql-bundle-${platform3}${extension}`;
return `codeql-bundle-${platform2}${extension}`;
}
function getCodeQLActionRepository(logger) {
if (isRunningLocalAction()) {
@@ -92385,12 +92220,12 @@ async function getCodeQLBundleDownloadURL(tagName, apiDetails, compressionMethod
}
const [repositoryOwner, repositoryName] = repository.split("/");
try {
const release3 = await getApiClient().rest.repos.getReleaseByTag({
const release2 = await getApiClient().rest.repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: tagName
});
for (const asset of release3.data.assets) {
for (const asset of release2.data.assets) {
if (asset.name === codeQLBundleName) {
logger.info(
`Found CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} with URL ${asset.url}.`
@@ -92830,14 +92665,14 @@ async function getNightlyToolsUrl(logger) {
zstdAvailability.available
) ? "zstd" : "gzip";
try {
const release3 = await getApiClient().rest.repos.listReleases({
const release2 = await getApiClient().rest.repos.listReleases({
owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER,
repo: CODEQL_NIGHTLIES_REPOSITORY_NAME,
per_page: 1,
page: 1,
prerelease: true
});
const latestRelease = release3.data[0];
const latestRelease = release2.data[0];
if (!latestRelease) {
throw new Error("Could not find the latest nightly release.");
}

lib/autobuild-action.js generated
View File

@@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({
var require_mock_client = __commonJS({
"node_modules/undici/lib/mock/mock-client.js"(exports2, module2) {
"use strict";
var { promisify: promisify2 } = require("util");
var { promisify } = require("util");
var Client = require_client();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify2(this[kOriginalClose])();
await promisify(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({
var require_mock_pool = __commonJS({
"node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) {
"use strict";
var { promisify: promisify2 } = require("util");
var { promisify } = require("util");
var Pool = require_pool();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify2(this[kOriginalClose])();
await promisify(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -18148,7 +18148,7 @@ var require_summary = __commonJS({
exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
var os_1 = require("os");
var fs_1 = require("fs");
var { access: access2, appendFile, writeFile } = fs_1.promises;
var { access, appendFile, writeFile } = fs_1.promises;
exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
var Summary = class {
@@ -18171,7 +18171,7 @@ var require_summary = __commonJS({
throw new Error(`Unable to find environment variable for $${exports2.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access2(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
} catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
@@ -24862,8 +24862,8 @@ var require_semver = __commonJS({
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release3, identifier, identifierBase) {
if (release3.startsWith("pre")) {
inc(release2, identifier, identifierBase) {
if (release2.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
@@ -24874,7 +24874,7 @@ var require_semver = __commonJS({
}
}
}
switch (release3) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -24965,7 +24965,7 @@ var require_semver = __commonJS({
break;
}
default:
throw new Error(`invalid increment argument: ${release3}`);
throw new Error(`invalid increment argument: ${release2}`);
}
this.raw = this.format();
if (this.build.length) {
@@ -25031,7 +25031,7 @@ var require_inc = __commonJS({
"node_modules/semver/functions/inc.js"(exports2, module2) {
"use strict";
var SemVer = require_semver();
var inc = (version, release3, options, identifier, identifierBase) => {
var inc = (version, release2, options, identifier, identifierBase) => {
if (typeof options === "string") {
identifierBase = identifier;
identifier = options;
@@ -25041,7 +25041,7 @@ var require_inc = __commonJS({
return new SemVer(
version instanceof SemVer ? version.version : version,
options
).inc(release3, identifier, identifierBase).version;
).inc(release2, identifier, identifierBase).version;
} catch (er) {
return null;
}
@@ -26498,7 +26498,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -31437,8 +31436,8 @@ var require_semver3 = __commonJS({
}
} while (++i2);
};
SemVer.prototype.inc = function(release3, identifier) {
switch (release3) {
SemVer.prototype.inc = function(release2, identifier) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -31514,20 +31513,20 @@ var require_semver3 = __commonJS({
}
break;
default:
throw new Error("invalid increment argument: " + release3);
throw new Error("invalid increment argument: " + release2);
}
this.format();
this.raw = this.version;
return this;
};
exports2.inc = inc;
function inc(version, release3, loose, identifier) {
function inc(version, release2, loose, identifier) {
if (typeof loose === "string") {
identifier = loose;
loose = void 0;
}
try {
return new SemVer(version, loose).inc(release3, identifier).version;
return new SemVer(version, loose).inc(release2, identifier).version;
} catch (er) {
return null;
}
@@ -55009,7 +55008,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
};
var access2 = {
var access = {
parameterPath: ["options", "access"],
mapper: {
serializedName: "x-ms-blob-public-access",
@@ -56817,7 +56816,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
requestId,
accept1,
metadata,
access2,
access,
defaultEncryptionScope,
preventEncryptionScopeOverride
],
@@ -56964,7 +56963,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
accept,
version,
requestId,
access2,
access,
leaseId,
ifModifiedSince,
ifUnmodifiedSince
@@ -65810,7 +65809,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
* @param containerAcl - Array of elements each having a unique Id and details of the access policy.
* @param options - Options to Container Set Access Policy operation.
*/
async setAccessPolicy(access3, containerAcl2, options = {}) {
async setAccessPolicy(access2, containerAcl2, options = {}) {
options.conditions = options.conditions || {};
return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
const acl = [];
@@ -65826,7 +65825,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
return assertResponse(await this.containerContext.setAccessPolicy({
abortSignal: options.abortSignal,
access: access3,
access: access2,
containerAcl: acl,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: options.conditions,
@@ -76104,148 +76103,14 @@ var github = __toESM(require_github());
var io2 = __toESM(require_io());
// src/util.ts
var fsPromises = __toESM(require("fs/promises"));
var path = __toESM(require("path"));
var core3 = __toESM(require_core());
var exec = __toESM(require_exec());
var io = __toESM(require_io());
// node_modules/check-disk-space/dist/check-disk-space.mjs
var import_node_child_process = require("node:child_process");
var import_promises = require("node:fs/promises");
var import_node_os = require("node:os");
var import_node_path = require("node:path");
var import_node_process = require("node:process");
var import_node_util = require("node:util");
var InvalidPathError = class _InvalidPathError extends Error {
constructor(message) {
super(message);
this.name = "InvalidPathError";
Object.setPrototypeOf(this, _InvalidPathError.prototype);
}
};
var NoMatchError = class _NoMatchError extends Error {
constructor(message) {
super(message);
this.name = "NoMatchError";
Object.setPrototypeOf(this, _NoMatchError.prototype);
}
};
async function isDirectoryExisting(directoryPath, dependencies) {
try {
await dependencies.fsAccess(directoryPath);
return Promise.resolve(true);
} catch (error2) {
return Promise.resolve(false);
}
}
async function getFirstExistingParentPath(directoryPath, dependencies) {
let parentDirectoryPath = directoryPath;
let parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
while (!parentDirectoryFound) {
parentDirectoryPath = dependencies.pathNormalize(parentDirectoryPath + "/..");
parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
}
return parentDirectoryPath;
}
async function hasPowerShell3(dependencies) {
const major = parseInt(dependencies.release.split(".")[0], 10);
if (major <= 6) {
return false;
}
try {
await dependencies.cpExecFile("where", ["powershell"], { windowsHide: true });
return true;
} catch (error2) {
return false;
}
}
function checkDiskSpace(directoryPath, dependencies = {
platform: import_node_process.platform,
release: (0, import_node_os.release)(),
fsAccess: import_promises.access,
pathNormalize: import_node_path.normalize,
pathSep: import_node_path.sep,
cpExecFile: (0, import_node_util.promisify)(import_node_child_process.execFile)
}) {
function mapOutput(stdout, filter, mapping, coefficient) {
const parsed = stdout.split("\n").map((line) => line.trim()).filter((line) => line.length !== 0).slice(1).map((line) => line.split(/\s+(?=[\d/])/));
const filtered = parsed.filter(filter);
if (filtered.length === 0) {
throw new NoMatchError();
}
const diskData = filtered[0];
return {
diskPath: diskData[mapping.diskPath],
free: parseInt(diskData[mapping.free], 10) * coefficient,
size: parseInt(diskData[mapping.size], 10) * coefficient
};
}
async function check(cmd, filter, mapping, coefficient = 1) {
const [file, ...args] = cmd;
if (file === void 0) {
return Promise.reject(new Error("cmd must contain at least one item"));
}
try {
const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
return mapOutput(stdout, filter, mapping, coefficient);
} catch (error2) {
return Promise.reject(error2);
}
}
async function checkWin32(directoryPath2) {
if (directoryPath2.charAt(1) !== ":") {
return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath2}`));
}
const powershellCmd = [
"powershell",
"Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size"
];
const wmicCmd = [
"wmic",
"logicaldisk",
"get",
"size,freespace,caption"
];
const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
return check(cmd, (driveData) => {
const driveLetter = driveData[0];
return directoryPath2.toUpperCase().startsWith(driveLetter.toUpperCase());
}, {
diskPath: 0,
free: 1,
size: 2
});
}
async function checkUnix(directoryPath2) {
if (!dependencies.pathNormalize(directoryPath2).startsWith(dependencies.pathSep)) {
return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath2}`));
}
const pathToCheck = await getFirstExistingParentPath(directoryPath2, dependencies);
return check(
[
"df",
"-Pk",
"--",
pathToCheck
],
() => true,
// We should only get one line, so we did not need to filter
{
diskPath: 5,
free: 3,
size: 1
},
1024
);
}
if (dependencies.platform === "win32") {
return checkWin32(directoryPath);
}
return checkUnix(directoryPath);
}
// node_modules/get-folder-size/index.js
var import_node_path2 = require("node:path");
var import_node_path = require("node:path");
async function getFolderSize(itemPath, options) {
return await core(itemPath, options, { errors: true });
}
@@ -76270,7 +76135,7 @@ async function core(rootItemPath, options = {}, returnType = {}) {
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
(directoryItem) => processItem((0, import_node_path2.join)(itemPath, directoryItem))
(directoryItem) => processItem((0, import_node_path.join)(itemPath, directoryItem))
)
);
}
@@ -79039,16 +78904,14 @@ function getErrorMessage(error2) {
}
async function checkDiskUsage(logger) {
try {
if (process.platform === "darwin" && (process.arch === "arm" || process.arch === "arm64") && !await checkSipEnablement(logger)) {
return void 0;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE")
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const message = `The Actions runner is running low on disk space (${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = 1024 * 1024 / blockSizeInBytes;
const numBlocksPerGb = 1024 * 1024 * 1024 / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message = `The Actions runner is running low on disk space (${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env["CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */] !== "true") {
logger.warning(message);
} else {
@@ -79057,8 +78920,8 @@ async function checkDiskUsage(logger) {
core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes
};
} catch (error2) {
logger.warning(
@@ -79084,34 +78947,6 @@ function checkActionVersion(version, githubVersion) {
function cloneObject(obj) {
return JSON.parse(JSON.stringify(obj));
}
async function checkSipEnablement(logger) {
if (process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] !== void 0 && ["true", "false"].includes(process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */])) {
return process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] === "true";
}
try {
const sipStatusOutput = await exec.getExecOutput("csrutil status");
if (sipStatusOutput.exitCode === 0) {
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: enabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
return true;
}
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: disabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
return false;
}
}
return void 0;
} catch (e) {
logger.warning(
`Failed to determine if System Integrity Protection was enabled: ${e}`
);
return void 0;
}
}
async function asyncFilter(array, predicate) {
const results = await Promise.all(array.map(predicate));
return array.filter((_, index) => results[index]);
@@ -79579,7 +79414,7 @@ function getCliConfigCategoryIfExists(cliError) {
}
function isUnsupportedPlatform() {
return !SUPPORTED_PLATFORMS.some(
([platform2, arch]) => platform2 === process.platform && arch === process.arch
([platform, arch]) => platform === process.platform && arch === process.arch
);
}
function getUnsupportedPlatformError(cliError) {

lib/init-action-post.js generated
View File

@@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({
var require_mock_client = __commonJS({
"node_modules/undici/lib/mock/mock-client.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Client = require_client();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({
var require_mock_pool = __commonJS({
"node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Pool = require_pool();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -18148,7 +18148,7 @@ var require_summary = __commonJS({
exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
var os_1 = require("os");
var fs_1 = require("fs");
var { access: access2, appendFile, writeFile } = fs_1.promises;
var { access, appendFile, writeFile } = fs_1.promises;
exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
var Summary = class {
@@ -18171,7 +18171,7 @@ var require_summary = __commonJS({
throw new Error(`Unable to find environment variable for $${exports2.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access2(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
} catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
@@ -28569,13 +28569,13 @@ var require_reusify = __commonJS({
current.next = null;
return current;
}
function release3(obj) {
function release2(obj) {
tail.next = obj;
tail = obj;
}
return {
get,
release: release3
release: release2
};
}
module2.exports = reusify;
@@ -28644,7 +28644,7 @@ var require_queue = __commonJS({
self2.paused = false;
for (var i = 0; i < self2.concurrency; i++) {
_running++;
release3();
release2();
}
}
function idle() {
@@ -28653,7 +28653,7 @@ var require_queue = __commonJS({
function push(value, done) {
var current = cache.get();
current.context = context3;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -28673,7 +28673,7 @@ var require_queue = __commonJS({
function unshift(value, done) {
var current = cache.get();
current.context = context3;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -28690,7 +28690,7 @@ var require_queue = __commonJS({
worker.call(context3, current.value, current.worked);
}
}
function release3(holder) {
function release2(holder) {
if (holder) {
cache.release(holder);
}
@@ -30711,8 +30711,8 @@ var require_semver = __commonJS({
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release3, identifier, identifierBase) {
if (release3.startsWith("pre")) {
inc(release2, identifier, identifierBase) {
if (release2.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
@@ -30723,7 +30723,7 @@ var require_semver = __commonJS({
}
}
}
switch (release3) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -30814,7 +30814,7 @@ var require_semver = __commonJS({
break;
}
default:
throw new Error(`invalid increment argument: ${release3}`);
throw new Error(`invalid increment argument: ${release2}`);
}
this.raw = this.format();
if (this.build.length) {
@@ -30880,7 +30880,7 @@ var require_inc = __commonJS({
"node_modules/semver/functions/inc.js"(exports2, module2) {
"use strict";
var SemVer = require_semver();
var inc = (version, release3, options, identifier, identifierBase) => {
var inc = (version, release2, options, identifier, identifierBase) => {
if (typeof options === "string") {
identifierBase = identifier;
identifier = options;
@@ -30890,7 +30890,7 @@ var require_inc = __commonJS({
return new SemVer(
version instanceof SemVer ? version.version : version,
options
).inc(release3, identifier, identifierBase).version;
).inc(release2, identifier, identifierBase).version;
} catch (er) {
return null;
}
@@ -32347,7 +32347,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -37286,8 +37285,8 @@ var require_semver3 = __commonJS({
}
} while (++i2);
};
SemVer.prototype.inc = function(release3, identifier) {
switch (release3) {
SemVer.prototype.inc = function(release2, identifier) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -37363,20 +37362,20 @@ var require_semver3 = __commonJS({
}
break;
default:
throw new Error("invalid increment argument: " + release3);
throw new Error("invalid increment argument: " + release2);
}
this.format();
this.raw = this.version;
return this;
};
exports2.inc = inc;
function inc(version, release3, loose, identifier) {
function inc(version, release2, loose, identifier) {
if (typeof loose === "string") {
identifier = loose;
loose = void 0;
}
try {
return new SemVer(version, loose).inc(release3, identifier).version;
return new SemVer(version, loose).inc(release2, identifier).version;
} catch (er) {
return null;
}
@@ -60858,7 +60857,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
};
var access2 = {
var access = {
parameterPath: ["options", "access"],
mapper: {
serializedName: "x-ms-blob-public-access",
@@ -62666,7 +62665,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
requestId,
accept1,
metadata,
access2,
access,
defaultEncryptionScope,
preventEncryptionScopeOverride
],
@@ -62813,7 +62812,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
accept,
version,
requestId,
access2,
access,
leaseId,
ifModifiedSince,
ifUnmodifiedSince
@@ -71659,7 +71658,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
* @param containerAcl - Array of elements each having a unique Id and details of the access policy.
* @param options - Options to Container Set Access Policy operation.
*/
async setAccessPolicy(access3, containerAcl2, options = {}) {
async setAccessPolicy(access2, containerAcl2, options = {}) {
options.conditions = options.conditions || {};
return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
const acl = [];
@@ -71675,7 +71674,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
return assertResponse(await this.containerContext.setAccessPolicy({
abortSignal: options.abortSignal,
access: access3,
access: access2,
containerAcl: acl,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: options.conditions,
@@ -87612,7 +87611,7 @@ var require_polyfills = __commonJS({
var constants = require("constants");
var origCwd = process.cwd;
var cwd = null;
var platform2 = process.env.GRACEFUL_FS_PLATFORM || process.platform;
var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform;
process.cwd = function() {
if (!cwd)
cwd = origCwd.call(process);
@@ -87671,7 +87670,7 @@ var require_polyfills = __commonJS({
fs20.lchownSync = function() {
};
}
if (platform2 === "win32") {
if (platform === "win32") {
fs20.rename = typeof fs20.rename !== "function" ? fs20.rename : (function(fs$rename) {
function rename(from, to, cb) {
var start = Date.now();
@@ -91192,8 +91191,8 @@ var require_primordials = __commonJS({
ArrayPrototypeIndexOf(self2, el) {
return self2.indexOf(el);
},
ArrayPrototypeJoin(self2, sep5) {
return self2.join(sep5);
ArrayPrototypeJoin(self2, sep4) {
return self2.join(sep4);
},
ArrayPrototypeMap(self2, fn) {
return self2.map(fn);
@@ -102004,7 +102003,7 @@ var require_commonjs16 = __commonJS({
var TYPEMASK = 1023;
var entToType = (s) => s.isFile() ? IFREG : s.isDirectory() ? IFDIR : s.isSymbolicLink() ? IFLNK : s.isCharacterDevice() ? IFCHR : s.isBlockDevice() ? IFBLK : s.isSocket() ? IFSOCK : s.isFIFO() ? IFIFO : UNKNOWN;
var normalizeCache = /* @__PURE__ */ new Map();
var normalize3 = (s) => {
var normalize2 = (s) => {
const c = normalizeCache.get(s);
if (c)
return c;
@@ -102017,7 +102016,7 @@ var require_commonjs16 = __commonJS({
const c = normalizeNocaseCache.get(s);
if (c)
return c;
const n = normalize3(s.toLowerCase());
const n = normalize2(s.toLowerCase());
normalizeNocaseCache.set(s, n);
return n;
};
@@ -102186,7 +102185,7 @@ var require_commonjs16 = __commonJS({
*/
constructor(name, type2 = UNKNOWN, root, roots, nocase, children, opts) {
this.name = name;
this.#matchName = nocase ? normalizeNocase(name) : normalize3(name);
this.#matchName = nocase ? normalizeNocase(name) : normalize2(name);
this.#type = type2 & TYPEMASK;
this.nocase = nocase;
this.roots = roots;
@@ -102279,7 +102278,7 @@ var require_commonjs16 = __commonJS({
return this.parent || this;
}
const children = this.children();
const name = this.nocase ? normalizeNocase(pathPart) : normalize3(pathPart);
const name = this.nocase ? normalizeNocase(pathPart) : normalize2(pathPart);
for (const p of children) {
if (p.#matchName === name) {
return p;
@@ -102524,7 +102523,7 @@ var require_commonjs16 = __commonJS({
* directly.
*/
isNamed(n) {
return !this.nocase ? this.#matchName === normalize3(n) : this.#matchName === normalizeNocase(n);
return !this.nocase ? this.#matchName === normalize2(n) : this.#matchName === normalizeNocase(n);
}
/**
* Return the Path object corresponding to the target of a symbolic link.
@@ -102663,7 +102662,7 @@ var require_commonjs16 = __commonJS({
#readdirMaybePromoteChild(e, c) {
for (let p = c.provisional; p < c.length; p++) {
const pchild = c[p];
const name = this.nocase ? normalizeNocase(e.name) : normalize3(e.name);
const name = this.nocase ? normalizeNocase(e.name) : normalize2(e.name);
if (name !== pchild.#matchName) {
continue;
}
@@ -103080,7 +103079,7 @@ var require_commonjs16 = __commonJS({
*
* @internal
*/
constructor(cwd = process.cwd(), pathImpl, sep5, { nocase, childrenCacheSize = 16 * 1024, fs: fs20 = defaultFS } = {}) {
constructor(cwd = process.cwd(), pathImpl, sep4, { nocase, childrenCacheSize = 16 * 1024, fs: fs20 = defaultFS } = {}) {
this.#fs = fsFromOption(fs20);
if (cwd instanceof URL || cwd.startsWith("file://")) {
cwd = (0, node_url_1.fileURLToPath)(cwd);
@@ -103091,7 +103090,7 @@ var require_commonjs16 = __commonJS({
this.#resolveCache = new ResolveCache();
this.#resolvePosixCache = new ResolveCache();
this.#children = new ChildrenCache(childrenCacheSize);
const split = cwdPath.substring(this.rootPath.length).split(sep5);
const split = cwdPath.substring(this.rootPath.length).split(sep4);
if (split.length === 1 && !split[0]) {
split.pop();
}
@@ -103714,7 +103713,7 @@ var require_pattern2 = __commonJS({
#isUNC;
#isAbsolute;
#followGlobstar = true;
constructor(patternList, globList, index, platform2) {
constructor(patternList, globList, index, platform) {
if (!isPatternList(patternList)) {
throw new TypeError("empty pattern list");
}
@@ -103731,7 +103730,7 @@ var require_pattern2 = __commonJS({
this.#patternList = patternList;
this.#globList = globList;
this.#index = index;
this.#platform = platform2;
this.#platform = platform;
if (this.#index === 0) {
if (this.isUNC()) {
const [p0, p1, p2, p3, ...prest] = this.#patternList;
@@ -103883,12 +103882,12 @@ var require_ignore2 = __commonJS({
absoluteChildren;
platform;
mmopts;
constructor(ignored, { nobrace, nocase, noext, noglobstar, platform: platform2 = defaultPlatform }) {
constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform }) {
this.relative = [];
this.absolute = [];
this.relativeChildren = [];
this.absoluteChildren = [];
this.platform = platform2;
this.platform = platform;
this.mmopts = {
dot: true,
nobrace,
@@ -103896,7 +103895,7 @@ var require_ignore2 = __commonJS({
noext,
noglobstar,
optimizationLevel: 2,
platform: platform2,
platform,
nocomment: true,
nonegate: true
};
@@ -106108,8 +106107,8 @@ var require_zip_archive_entry = __commonJS({
}
this.name = name;
};
ZipArchiveEntry.prototype.setPlatform = function(platform2) {
this.platform = platform2;
ZipArchiveEntry.prototype.setPlatform = function(platform) {
this.platform = platform;
};
ZipArchiveEntry.prototype.setSize = function(size) {
if (size < 0) {
@@ -110494,7 +110493,7 @@ var require_tr46 = __commonJS({
TRANSITIONAL: 0,
NONTRANSITIONAL: 1
};
function normalize3(str2) {
function normalize2(str2) {
return str2.split("\0").map(function(s) {
return s.normalize("NFC");
}).join("\0");
@@ -110574,7 +110573,7 @@ var require_tr46 = __commonJS({
processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;
}
var error2 = false;
if (normalize3(label) !== label || label[3] === "-" && label[4] === "-" || label[0] === "-" || label[label.length - 1] === "-" || label.indexOf(".") !== -1 || label.search(combiningMarksRegex) === 0) {
if (normalize2(label) !== label || label[3] === "-" && label[4] === "-" || label[0] === "-" || label[label.length - 1] === "-" || label.indexOf(".") !== -1 || label.search(combiningMarksRegex) === 0) {
error2 = true;
}
var len = countSymbols(label);
@@ -110592,7 +110591,7 @@ var require_tr46 = __commonJS({
}
function processing(domain_name, useSTD3, processing_option) {
var result = mapChars(domain_name, useSTD3, processing_option);
result.string = normalize3(result.string);
result.string = normalize2(result.string);
var labels = result.string.split(".");
for (var i = 0; i < labels.length; ++i) {
try {
@@ -118436,13 +118435,13 @@ var require_tmp = __commonJS({
var require_tmp_promise = __commonJS({
"node_modules/tmp-promise/index.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var tmp = require_tmp();
module2.exports.fileSync = tmp.fileSync;
var fileWithOptions = promisify3(
var fileWithOptions = promisify2(
(options, cb) => tmp.file(
options,
(err, path19, fd, cleanup) => err ? cb(err) : cb(void 0, { path: path19, fd, cleanup: promisify3(cleanup) })
(err, path19, fd, cleanup) => err ? cb(err) : cb(void 0, { path: path19, fd, cleanup: promisify2(cleanup) })
)
);
module2.exports.file = async (options) => fileWithOptions(options);
@@ -118455,10 +118454,10 @@ var require_tmp_promise = __commonJS({
}
};
module2.exports.dirSync = tmp.dirSync;
var dirWithOptions = promisify3(
var dirWithOptions = promisify2(
(options, cb) => tmp.dir(
options,
(err, path19, cleanup) => err ? cb(err) : cb(void 0, { path: path19, cleanup: promisify3(cleanup) })
(err, path19, cleanup) => err ? cb(err) : cb(void 0, { path: path19, cleanup: promisify2(cleanup) })
)
);
module2.exports.dir = async (options) => dirWithOptions(options);
@@ -118471,7 +118470,7 @@ var require_tmp_promise = __commonJS({
}
};
module2.exports.tmpNameSync = tmp.tmpNameSync;
module2.exports.tmpName = promisify3(tmp.tmpName);
module2.exports.tmpName = promisify2(tmp.tmpName);
module2.exports.tmpdir = tmp.tmpdir;
module2.exports.setGracefulCleanup = tmp.setGracefulCleanup;
}
@@ -124424,160 +124423,26 @@ var io2 = __toESM(require_io());
// src/util.ts
var fs4 = __toESM(require("fs"));
var fsPromises4 = __toESM(require("fs/promises"));
var path5 = __toESM(require("path"));
var core3 = __toESM(require_core());
var exec = __toESM(require_exec());
var io = __toESM(require_io());
// node_modules/check-disk-space/dist/check-disk-space.mjs
var import_node_child_process = require("node:child_process");
var import_promises = require("node:fs/promises");
var import_node_os = require("node:os");
var import_node_path = require("node:path");
var import_node_process = require("node:process");
var import_node_util = require("node:util");
var InvalidPathError = class _InvalidPathError extends Error {
constructor(message) {
super(message);
this.name = "InvalidPathError";
Object.setPrototypeOf(this, _InvalidPathError.prototype);
}
};
var NoMatchError = class _NoMatchError extends Error {
constructor(message) {
super(message);
this.name = "NoMatchError";
Object.setPrototypeOf(this, _NoMatchError.prototype);
}
};
async function isDirectoryExisting(directoryPath, dependencies) {
try {
await dependencies.fsAccess(directoryPath);
return Promise.resolve(true);
} catch (error2) {
return Promise.resolve(false);
}
}
async function getFirstExistingParentPath(directoryPath, dependencies) {
let parentDirectoryPath = directoryPath;
let parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
while (!parentDirectoryFound) {
parentDirectoryPath = dependencies.pathNormalize(parentDirectoryPath + "/..");
parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
}
return parentDirectoryPath;
}
async function hasPowerShell3(dependencies) {
const major = parseInt(dependencies.release.split(".")[0], 10);
if (major <= 6) {
return false;
}
try {
await dependencies.cpExecFile("where", ["powershell"], { windowsHide: true });
return true;
} catch (error2) {
return false;
}
}
function checkDiskSpace(directoryPath, dependencies = {
platform: import_node_process.platform,
release: (0, import_node_os.release)(),
fsAccess: import_promises.access,
pathNormalize: import_node_path.normalize,
pathSep: import_node_path.sep,
cpExecFile: (0, import_node_util.promisify)(import_node_child_process.execFile)
}) {
function mapOutput(stdout, filter, mapping, coefficient) {
const parsed = stdout.split("\n").map((line) => line.trim()).filter((line) => line.length !== 0).slice(1).map((line) => line.split(/\s+(?=[\d/])/));
const filtered = parsed.filter(filter);
if (filtered.length === 0) {
throw new NoMatchError();
}
const diskData = filtered[0];
return {
diskPath: diskData[mapping.diskPath],
free: parseInt(diskData[mapping.free], 10) * coefficient,
size: parseInt(diskData[mapping.size], 10) * coefficient
};
}
async function check(cmd, filter, mapping, coefficient = 1) {
const [file, ...args] = cmd;
if (file === void 0) {
return Promise.reject(new Error("cmd must contain at least one item"));
}
try {
const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
return mapOutput(stdout, filter, mapping, coefficient);
} catch (error2) {
return Promise.reject(error2);
}
}
async function checkWin32(directoryPath2) {
if (directoryPath2.charAt(1) !== ":") {
return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath2}`));
}
const powershellCmd = [
"powershell",
"Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size"
];
const wmicCmd = [
"wmic",
"logicaldisk",
"get",
"size,freespace,caption"
];
const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
return check(cmd, (driveData) => {
const driveLetter = driveData[0];
return directoryPath2.toUpperCase().startsWith(driveLetter.toUpperCase());
}, {
diskPath: 0,
free: 1,
size: 2
});
}
async function checkUnix(directoryPath2) {
if (!dependencies.pathNormalize(directoryPath2).startsWith(dependencies.pathSep)) {
return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath2}`));
}
const pathToCheck = await getFirstExistingParentPath(directoryPath2, dependencies);
return check(
[
"df",
"-Pk",
"--",
pathToCheck
],
() => true,
// We should only get one line, so we did not need to filter
{
diskPath: 5,
free: 3,
size: 1
},
1024
);
}
if (dependencies.platform === "win32") {
return checkWin32(directoryPath);
}
return checkUnix(directoryPath);
}
// node_modules/del/index.js
var import_promises5 = __toESM(require("node:fs/promises"), 1);
var import_node_path6 = __toESM(require("node:path"), 1);
var import_node_process5 = __toESM(require("node:process"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_process4 = __toESM(require("node:process"), 1);
// node_modules/globby/index.js
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_fs3 = __toESM(require("node:fs"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
// node_modules/globby/node_modules/@sindresorhus/merge-streams/index.js
var import_node_events = require("node:events");
var import_node_stream = require("node:stream");
var import_promises2 = require("node:stream/promises");
var import_promises = require("node:stream/promises");
function mergeStreams(streams) {
if (!Array.isArray(streams)) {
throw new TypeError(`Expected an array, got \`${typeof streams}\`.`);
@@ -124652,7 +124517,7 @@ var onMergedStreamFinished = async (passThroughStream, streams) => {
}
};
var onMergedStreamEnd = async (passThroughStream, { signal }) => {
await (0, import_promises2.finished)(passThroughStream, { signal, cleanup: true });
await (0, import_promises.finished)(passThroughStream, { signal, cleanup: true });
};
var onInputStreamsUnpipe = async (passThroughStream, streams, { signal }) => {
for await (const [unpipedStream] of (0, import_node_events.on)(passThroughStream, "unpipe", { signal })) {
@@ -124702,7 +124567,7 @@ var afterMergedStreamFinished = async (onFinished, stream2) => {
};
var onInputStreamEnd = async ({ passThroughStream, stream: stream2, streams, ended, aborted, controller: { signal } }) => {
try {
await (0, import_promises2.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
await (0, import_promises.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
if (streams.has(stream2)) {
ended.add(stream2);
}
@@ -124756,13 +124621,13 @@ var import_fast_glob2 = __toESM(require_out4(), 1);
// node_modules/path-type/index.js
var import_node_fs = __toESM(require("node:fs"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_promises2 = __toESM(require("node:fs/promises"), 1);
async function isType(fsStatType, statsMethodName, filePath) {
if (typeof filePath !== "string") {
throw new TypeError(`Expected a string, got ${typeof filePath}`);
}
try {
const stats = await import_promises3.default[fsStatType](filePath);
const stats = await import_promises2.default[fsStatType](filePath);
return stats[statsMethodName]();
} catch (error2) {
if (error2.code === "ENOENT") {
@@ -124792,20 +124657,20 @@ var isDirectorySync = isTypeSync.bind(void 0, "statSync", "isDirectory");
var isSymlinkSync = isTypeSync.bind(void 0, "lstatSync", "isSymbolicLink");
// node_modules/unicorn-magic/node.js
var import_node_util2 = require("node:util");
var import_node_child_process2 = require("node:child_process");
var import_node_util = require("node:util");
var import_node_child_process = require("node:child_process");
var import_node_url = require("node:url");
var execFileOriginal = (0, import_node_util2.promisify)(import_node_child_process2.execFile);
var execFileOriginal = (0, import_node_util.promisify)(import_node_child_process.execFile);
function toPath(urlOrPath) {
return urlOrPath instanceof URL ? (0, import_node_url.fileURLToPath)(urlOrPath) : urlOrPath;
}
var TEN_MEGABYTES_IN_BYTES = 10 * 1024 * 1024;
// node_modules/globby/ignore.js
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_process = __toESM(require("node:process"), 1);
var import_node_fs2 = __toESM(require("node:fs"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_node_path = __toESM(require("node:path"), 1);
var import_fast_glob = __toESM(require_out4(), 1);
var import_ignore = __toESM(require_ignore(), 1);
@@ -124833,16 +124698,16 @@ var ignoreFilesGlobOptions = {
dot: true
};
var GITIGNORE_FILES_PATTERN = "**/.gitignore";
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path2.default.posix.join(base, pattern.slice(1)) : import_node_path2.default.posix.join(base, pattern);
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path.default.posix.join(base, pattern.slice(1)) : import_node_path.default.posix.join(base, pattern);
var parseIgnoreFile = (file, cwd) => {
const base = slash(import_node_path2.default.relative(cwd, import_node_path2.default.dirname(file.filePath)));
const base = slash(import_node_path.default.relative(cwd, import_node_path.default.dirname(file.filePath)));
return file.content.split(/\r?\n/).filter((line) => line && !line.startsWith("#")).map((pattern) => applyBaseToPattern(pattern, base));
};
var toRelativePath = (fileOrDirectory, cwd) => {
cwd = slash(cwd);
if (import_node_path2.default.isAbsolute(fileOrDirectory)) {
if (import_node_path.default.isAbsolute(fileOrDirectory)) {
if (slash(fileOrDirectory).startsWith(cwd)) {
return import_node_path2.default.relative(cwd, fileOrDirectory);
return import_node_path.default.relative(cwd, fileOrDirectory);
}
throw new Error(`Path ${fileOrDirectory} is not in cwd ${cwd}`);
}
@@ -124858,7 +124723,7 @@ var getIsIgnoredPredicate = (files, cwd) => {
};
};
var normalizeOptions = (options = {}) => ({
cwd: toPath(options.cwd) ?? import_node_process2.default.cwd(),
cwd: toPath(options.cwd) ?? import_node_process.default.cwd(),
suppressErrors: Boolean(options.suppressErrors),
deep: typeof options.deep === "number" ? options.deep : Number.POSITIVE_INFINITY,
ignore: [...options.ignore ?? [], ...defaultIgnoredDirectories]
@@ -124875,7 +124740,7 @@ var isIgnoredByIgnoreFiles = async (patterns, options) => {
const files = await Promise.all(
paths.map(async (filePath) => ({
filePath,
content: await import_promises4.default.readFile(filePath, "utf8")
content: await import_promises3.default.readFile(filePath, "utf8")
}))
);
return getIsIgnoredPredicate(files, cwd);
@@ -124904,14 +124769,14 @@ var assertPatternsInput = (patterns) => {
};
var normalizePathForDirectoryGlob = (filePath, cwd) => {
const path19 = isNegativePattern(filePath) ? filePath.slice(1) : filePath;
return import_node_path3.default.isAbsolute(path19) ? path19 : import_node_path3.default.join(cwd, path19);
return import_node_path2.default.isAbsolute(path19) ? path19 : import_node_path2.default.join(cwd, path19);
};
var getDirectoryGlob = ({ directoryPath, files, extensions }) => {
const extensionGlob = extensions?.length > 0 ? `.${extensions.length > 1 ? `{${extensions.join(",")}}` : extensions[0]}` : "";
return files ? files.map((file) => import_node_path3.default.posix.join(directoryPath, `**/${import_node_path3.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path3.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
return files ? files.map((file) => import_node_path2.default.posix.join(directoryPath, `**/${import_node_path2.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path2.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
};
var directoryToGlob = async (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => {
@@ -124921,7 +124786,7 @@ var directoryToGlob = async (directoryPaths, {
return globs.flat();
};
var directoryToGlobSync = (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => directoryPaths.flatMap((directoryPath) => isDirectorySync(normalizePathForDirectoryGlob(directoryPath, cwd)) ? getDirectoryGlob({ directoryPath, files, extensions }) : directoryPath);
@@ -124979,7 +124844,7 @@ var getFilterSync = (options) => {
var createFilterFunction = (isIgnored) => {
const seen = /* @__PURE__ */ new Set();
return (fastGlobResult) => {
const pathKey = import_node_path3.default.normalize(fastGlobResult.path ?? fastGlobResult);
const pathKey = import_node_path2.default.normalize(fastGlobResult.path ?? fastGlobResult);
if (seen.has(pathKey) || isIgnored && isIgnored(pathKey)) {
return false;
}
@@ -125090,12 +124955,12 @@ var { convertPathToPattern } = import_fast_glob2.default;
var import_is_glob = __toESM(require_is_glob(), 1);
// node_modules/is-path-cwd/index.js
var import_node_process4 = __toESM(require("node:process"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
function isPathCwd(path_) {
let cwd = import_node_process4.default.cwd();
path_ = import_node_path4.default.resolve(path_);
if (import_node_process4.default.platform === "win32") {
let cwd = import_node_process3.default.cwd();
path_ = import_node_path3.default.resolve(path_);
if (import_node_process3.default.platform === "win32") {
cwd = cwd.toLowerCase();
path_ = path_.toLowerCase();
}
@@ -125103,11 +124968,11 @@ function isPathCwd(path_) {
}
// node_modules/del/node_modules/is-path-inside/index.js
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
function isPathInside(childPath, parentPath) {
const relation = import_node_path5.default.relative(parentPath, childPath);
const relation = import_node_path4.default.relative(parentPath, childPath);
return Boolean(
relation && relation !== ".." && !relation.startsWith(`..${import_node_path5.default.sep}`) && relation !== import_node_path5.default.resolve(childPath)
relation && relation !== ".." && !relation.startsWith(`..${import_node_path4.default.sep}`) && relation !== import_node_path4.default.resolve(childPath)
);
}
@@ -125246,14 +125111,14 @@ function safeCheck(file, cwd) {
function normalizePatterns(patterns) {
patterns = Array.isArray(patterns) ? patterns : [patterns];
patterns = patterns.map((pattern) => {
if (import_node_process5.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
if (import_node_process4.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
return slash(pattern);
}
return pattern;
});
return patterns;
}
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5.default.cwd(), onProgress = () => {
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process4.default.cwd(), onProgress = () => {
}, ...options } = {}) {
options = {
expandDirectories: false,
@@ -125274,12 +125139,12 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
let deletedCount = 0;
const mapper = async (file) => {
file = import_node_path6.default.resolve(cwd, file);
file = import_node_path5.default.resolve(cwd, file);
if (!force) {
safeCheck(file, cwd);
}
if (!dryRun) {
await import_promises5.default.rm(file, { recursive: true, force: true });
await import_promises4.default.rm(file, { recursive: true, force: true });
}
deletedCount += 1;
onProgress({
@@ -125296,7 +125161,7 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
// node_modules/get-folder-size/index.js
var import_node_path7 = require("node:path");
var import_node_path6 = require("node:path");
async function getFolderSize(itemPath, options) {
return await core(itemPath, options, { errors: true });
}
@@ -125321,7 +125186,7 @@ async function core(rootItemPath, options = {}, returnType = {}) {
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
(directoryItem) => processItem((0, import_node_path7.join)(itemPath, directoryItem))
(directoryItem) => processItem((0, import_node_path6.join)(itemPath, directoryItem))
)
);
}
@@ -128157,16 +128022,14 @@ function getErrorMessage(error2) {
}
async function checkDiskUsage(logger) {
try {
if (process.platform === "darwin" && (process.arch === "arm" || process.arch === "arm64") && !await checkSipEnablement(logger)) {
return void 0;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises4.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE")
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const message = `The Actions runner is running low on disk space (${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = 1024 * 1024 / blockSizeInBytes;
const numBlocksPerGb = 1024 * 1024 * 1024 / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message = `The Actions runner is running low on disk space (${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env["CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */] !== "true") {
logger.warning(message);
} else {
@@ -128175,8 +128038,8 @@ async function checkDiskUsage(logger) {
core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes
};
} catch (error2) {
logger.warning(
@@ -128196,34 +128059,6 @@ function satisfiesGHESVersion(ghesVersion, range, defaultIfInvalid) {
function cloneObject(obj) {
return JSON.parse(JSON.stringify(obj));
}
async function checkSipEnablement(logger) {
if (process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] !== void 0 && ["true", "false"].includes(process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */])) {
return process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] === "true";
}
try {
const sipStatusOutput = await exec.getExecOutput("csrutil status");
if (sipStatusOutput.exitCode === 0) {
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: enabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
return true;
}
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: disabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
return false;
}
}
return void 0;
} catch (e) {
logger.warning(
`Failed to determine if System Integrity Protection was enabled: ${e}`
);
return void 0;
}
}
async function cleanUpGlob(glob2, name, logger) {
logger.debug(`Cleaning up ${name}.`);
try {
@@ -128865,7 +128700,7 @@ function getCliConfigCategoryIfExists(cliError) {
}
function isUnsupportedPlatform() {
return !SUPPORTED_PLATFORMS.some(
([platform2, arch2]) => platform2 === process.platform && arch2 === process.arch
([platform, arch2]) => platform === process.platform && arch2 === process.arch
);
}
function getUnsupportedPlatformError(cliError) {
@@ -130183,17 +130018,17 @@ function getCodeQLBundleExtension(compressionMethod) {
}
function getCodeQLBundleName(compressionMethod) {
const extension = getCodeQLBundleExtension(compressionMethod);
let platform2;
let platform;
if (process.platform === "win32") {
platform2 = "win64";
platform = "win64";
} else if (process.platform === "linux") {
platform2 = "linux64";
platform = "linux64";
} else if (process.platform === "darwin") {
platform2 = "osx64";
platform = "osx64";
} else {
return `codeql-bundle${extension}`;
}
return `codeql-bundle-${platform2}${extension}`;
return `codeql-bundle-${platform}${extension}`;
}
function getCodeQLActionRepository(logger) {
if (isRunningLocalAction()) {
@@ -130227,12 +130062,12 @@ async function getCodeQLBundleDownloadURL(tagName, apiDetails, compressionMethod
}
const [repositoryOwner, repositoryName] = repository.split("/");
try {
const release3 = await getApiClient().rest.repos.getReleaseByTag({
const release2 = await getApiClient().rest.repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: tagName
});
for (const asset of release3.data.assets) {
for (const asset of release2.data.assets) {
if (asset.name === codeQLBundleName) {
logger.info(
`Found CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} with URL ${asset.url}.`
@@ -130672,14 +130507,14 @@ async function getNightlyToolsUrl(logger) {
zstdAvailability.available
) ? "zstd" : "gzip";
try {
const release3 = await getApiClient().rest.repos.listReleases({
const release2 = await getApiClient().rest.repos.listReleases({
owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER,
repo: CODEQL_NIGHTLIES_REPOSITORY_NAME,
per_page: 1,
page: 1,
prerelease: true
});
const latestRelease = release3.data[0];
const latestRelease = release2.data[0];
if (!latestRelease) {
throw new Error("Could not find the latest nightly release.");
}
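
The hunks above swap the bundled check-disk-space helper for Node's built-in fs.promises.statfs (added in Node.js 18.15). statfs reports filesystem sizes in blocks of bsize bytes rather than in bytes, which is why the new checkDiskUsage multiplies bavail and blocks by bsize. The following is a minimal standalone sketch of that check, not the action's exact code: console.warn stands in for the action's logger, the HAS_WARNED_ABOUT_DISK_SPACE deduplication is omitted, and GITHUB_WORKSPACE falls back to the current directory when unset.

// Minimal sketch of the statfs-based disk check, under the assumptions above.
const fsPromises = require("node:fs/promises");

async function checkDiskUsage() {
  // statfs reports sizes as counts of `bsize`-byte blocks.
  const stats = await fsPromises.statfs(process.env.GITHUB_WORKSPACE ?? ".");
  const blockSizeInBytes = stats.bsize;
  const numAvailableBytes = stats.bavail * blockSizeInBytes; // blocks available to unprivileged users
  const numTotalBytes = stats.blocks * blockSizeInBytes;     // total data blocks on the filesystem
  const gbInBytes = 1024 * 1024 * 1024;
  // Comparing bytes here is equivalent to the block-count comparison
  // (bavail < 2 * numBlocksPerGb) in the bundled code.
  if (numAvailableBytes < 2 * gbInBytes) {
    const mbAvailable = (numAvailableBytes / (1024 * 1024)).toPrecision(4);
    console.warn(`The Actions runner is running low on disk space (${mbAvailable} MB available).`);
  }
  return { numAvailableBytes, numTotalBytes };
}

checkDiskUsage().then((usage) => console.log(usage));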

349
lib/init-action.js generated
View File

@@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({
var require_mock_client = __commonJS({
"node_modules/undici/lib/mock/mock-client.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Client = require_client();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({
var require_mock_pool = __commonJS({
"node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Pool = require_pool();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -18148,7 +18148,7 @@ var require_summary = __commonJS({
exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
var os_1 = require("os");
var fs_1 = require("fs");
var { access: access2, appendFile, writeFile } = fs_1.promises;
var { access, appendFile, writeFile } = fs_1.promises;
exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
var Summary = class {
@@ -18171,7 +18171,7 @@ var require_summary = __commonJS({
throw new Error(`Unable to find environment variable for $${exports2.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access2(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
} catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
@@ -20153,8 +20153,8 @@ var require_semver = __commonJS({
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release3, identifier, identifierBase) {
if (release3.startsWith("pre")) {
inc(release2, identifier, identifierBase) {
if (release2.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
@@ -20165,7 +20165,7 @@ var require_semver = __commonJS({
}
}
}
switch (release3) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -20256,7 +20256,7 @@ var require_semver = __commonJS({
break;
}
default:
throw new Error(`invalid increment argument: ${release3}`);
throw new Error(`invalid increment argument: ${release2}`);
}
this.raw = this.format();
if (this.build.length) {
@@ -20322,7 +20322,7 @@ var require_inc = __commonJS({
"node_modules/semver/functions/inc.js"(exports2, module2) {
"use strict";
var SemVer = require_semver();
var inc = (version, release3, options, identifier, identifierBase) => {
var inc = (version, release2, options, identifier, identifierBase) => {
if (typeof options === "string") {
identifierBase = identifier;
identifier = options;
@@ -20332,7 +20332,7 @@ var require_inc = __commonJS({
return new SemVer(
version instanceof SemVer ? version.version : version,
options
).inc(release3, identifier, identifierBase).version;
).inc(release2, identifier, identifierBase).version;
} catch (er) {
return null;
}
@@ -30499,13 +30499,13 @@ var require_reusify = __commonJS({
current.next = null;
return current;
}
function release3(obj) {
function release2(obj) {
tail.next = obj;
tail = obj;
}
return {
get,
release: release3
release: release2
};
}
module2.exports = reusify;
@@ -30574,7 +30574,7 @@ var require_queue = __commonJS({
self2.paused = false;
for (var i = 0; i < self2.concurrency; i++) {
_running++;
release3();
release2();
}
}
function idle() {
@@ -30583,7 +30583,7 @@ var require_queue = __commonJS({
function push(value, done) {
var current = cache.get();
current.context = context2;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -30603,7 +30603,7 @@ var require_queue = __commonJS({
function unshift(value, done) {
var current = cache.get();
current.context = context2;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -30620,7 +30620,7 @@ var require_queue = __commonJS({
worker.call(context2, current.value, current.worked);
}
}
function release3(holder) {
function release2(holder) {
if (holder) {
cache.release(holder);
}
@@ -32347,7 +32347,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -37437,8 +37436,8 @@ var require_semver3 = __commonJS({
}
} while (++i2);
};
SemVer.prototype.inc = function(release3, identifier) {
switch (release3) {
SemVer.prototype.inc = function(release2, identifier) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -37514,20 +37513,20 @@ var require_semver3 = __commonJS({
}
break;
default:
throw new Error("invalid increment argument: " + release3);
throw new Error("invalid increment argument: " + release2);
}
this.format();
this.raw = this.version;
return this;
};
exports2.inc = inc;
function inc(version, release3, loose, identifier) {
function inc(version, release2, loose, identifier) {
if (typeof loose === "string") {
identifier = loose;
loose = void 0;
}
try {
return new SemVer(version, loose).inc(release3, identifier).version;
return new SemVer(version, loose).inc(release2, identifier).version;
} catch (er) {
return null;
}
@@ -61009,7 +61008,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
};
var access2 = {
var access = {
parameterPath: ["options", "access"],
mapper: {
serializedName: "x-ms-blob-public-access",
@@ -62817,7 +62816,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
requestId,
accept1,
metadata,
access2,
access,
defaultEncryptionScope,
preventEncryptionScopeOverride
],
@@ -62964,7 +62963,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
accept,
version,
requestId,
access2,
access,
leaseId,
ifModifiedSince,
ifUnmodifiedSince
@@ -71810,7 +71809,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
* @param containerAcl - Array of elements each having a unique Id and details of the access policy.
* @param options - Options to Container Set Access Policy operation.
*/
async setAccessPolicy(access3, containerAcl2, options = {}) {
async setAccessPolicy(access2, containerAcl2, options = {}) {
options.conditions = options.conditions || {};
return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
const acl = [];
@@ -71826,7 +71825,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
return assertResponse(await this.containerContext.setAccessPolicy({
abortSignal: options.abortSignal,
access: access3,
access: access2,
containerAcl: acl,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: options.conditions,
@@ -83261,161 +83260,27 @@ var io2 = __toESM(require_io());
// src/util.ts
var fs4 = __toESM(require("fs"));
var fsPromises4 = __toESM(require("fs/promises"));
var os = __toESM(require("os"));
var path5 = __toESM(require("path"));
var core3 = __toESM(require_core());
var exec = __toESM(require_exec());
var io = __toESM(require_io());
// node_modules/check-disk-space/dist/check-disk-space.mjs
var import_node_child_process = require("node:child_process");
var import_promises = require("node:fs/promises");
var import_node_os = require("node:os");
var import_node_path = require("node:path");
var import_node_process = require("node:process");
var import_node_util = require("node:util");
var InvalidPathError = class _InvalidPathError extends Error {
constructor(message) {
super(message);
this.name = "InvalidPathError";
Object.setPrototypeOf(this, _InvalidPathError.prototype);
}
};
var NoMatchError = class _NoMatchError extends Error {
constructor(message) {
super(message);
this.name = "NoMatchError";
Object.setPrototypeOf(this, _NoMatchError.prototype);
}
};
async function isDirectoryExisting(directoryPath, dependencies) {
try {
await dependencies.fsAccess(directoryPath);
return Promise.resolve(true);
} catch (error2) {
return Promise.resolve(false);
}
}
async function getFirstExistingParentPath(directoryPath, dependencies) {
let parentDirectoryPath = directoryPath;
let parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
while (!parentDirectoryFound) {
parentDirectoryPath = dependencies.pathNormalize(parentDirectoryPath + "/..");
parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
}
return parentDirectoryPath;
}
async function hasPowerShell3(dependencies) {
const major = parseInt(dependencies.release.split(".")[0], 10);
if (major <= 6) {
return false;
}
try {
await dependencies.cpExecFile("where", ["powershell"], { windowsHide: true });
return true;
} catch (error2) {
return false;
}
}
function checkDiskSpace(directoryPath, dependencies = {
platform: import_node_process.platform,
release: (0, import_node_os.release)(),
fsAccess: import_promises.access,
pathNormalize: import_node_path.normalize,
pathSep: import_node_path.sep,
cpExecFile: (0, import_node_util.promisify)(import_node_child_process.execFile)
}) {
function mapOutput(stdout, filter, mapping, coefficient) {
const parsed = stdout.split("\n").map((line) => line.trim()).filter((line) => line.length !== 0).slice(1).map((line) => line.split(/\s+(?=[\d/])/));
const filtered = parsed.filter(filter);
if (filtered.length === 0) {
throw new NoMatchError();
}
const diskData = filtered[0];
return {
diskPath: diskData[mapping.diskPath],
free: parseInt(diskData[mapping.free], 10) * coefficient,
size: parseInt(diskData[mapping.size], 10) * coefficient
};
}
async function check(cmd, filter, mapping, coefficient = 1) {
const [file, ...args] = cmd;
if (file === void 0) {
return Promise.reject(new Error("cmd must contain at least one item"));
}
try {
const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
return mapOutput(stdout, filter, mapping, coefficient);
} catch (error2) {
return Promise.reject(error2);
}
}
async function checkWin32(directoryPath2) {
if (directoryPath2.charAt(1) !== ":") {
return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath2}`));
}
const powershellCmd = [
"powershell",
"Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size"
];
const wmicCmd = [
"wmic",
"logicaldisk",
"get",
"size,freespace,caption"
];
const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
return check(cmd, (driveData) => {
const driveLetter = driveData[0];
return directoryPath2.toUpperCase().startsWith(driveLetter.toUpperCase());
}, {
diskPath: 0,
free: 1,
size: 2
});
}
async function checkUnix(directoryPath2) {
if (!dependencies.pathNormalize(directoryPath2).startsWith(dependencies.pathSep)) {
return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath2}`));
}
const pathToCheck = await getFirstExistingParentPath(directoryPath2, dependencies);
return check(
[
"df",
"-Pk",
"--",
pathToCheck
],
() => true,
// We should only get one line, so we did not need to filter
{
diskPath: 5,
free: 3,
size: 1
},
1024
);
}
if (dependencies.platform === "win32") {
return checkWin32(directoryPath);
}
return checkUnix(directoryPath);
}
// node_modules/del/index.js
var import_promises5 = __toESM(require("node:fs/promises"), 1);
var import_node_path6 = __toESM(require("node:path"), 1);
var import_node_process5 = __toESM(require("node:process"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_process4 = __toESM(require("node:process"), 1);
// node_modules/globby/index.js
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_fs3 = __toESM(require("node:fs"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
// node_modules/globby/node_modules/@sindresorhus/merge-streams/index.js
var import_node_events = require("node:events");
var import_node_stream = require("node:stream");
var import_promises2 = require("node:stream/promises");
var import_promises = require("node:stream/promises");
function mergeStreams(streams) {
if (!Array.isArray(streams)) {
throw new TypeError(`Expected an array, got \`${typeof streams}\`.`);
@@ -83490,7 +83355,7 @@ var onMergedStreamFinished = async (passThroughStream, streams) => {
}
};
var onMergedStreamEnd = async (passThroughStream, { signal }) => {
await (0, import_promises2.finished)(passThroughStream, { signal, cleanup: true });
await (0, import_promises.finished)(passThroughStream, { signal, cleanup: true });
};
var onInputStreamsUnpipe = async (passThroughStream, streams, { signal }) => {
for await (const [unpipedStream] of (0, import_node_events.on)(passThroughStream, "unpipe", { signal })) {
@@ -83540,7 +83405,7 @@ var afterMergedStreamFinished = async (onFinished, stream2) => {
};
var onInputStreamEnd = async ({ passThroughStream, stream: stream2, streams, ended, aborted, controller: { signal } }) => {
try {
await (0, import_promises2.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
await (0, import_promises.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
if (streams.has(stream2)) {
ended.add(stream2);
}
@@ -83594,13 +83459,13 @@ var import_fast_glob2 = __toESM(require_out4(), 1);
// node_modules/path-type/index.js
var import_node_fs = __toESM(require("node:fs"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_promises2 = __toESM(require("node:fs/promises"), 1);
async function isType(fsStatType, statsMethodName, filePath) {
if (typeof filePath !== "string") {
throw new TypeError(`Expected a string, got ${typeof filePath}`);
}
try {
const stats = await import_promises3.default[fsStatType](filePath);
const stats = await import_promises2.default[fsStatType](filePath);
return stats[statsMethodName]();
} catch (error2) {
if (error2.code === "ENOENT") {
@@ -83630,20 +83495,20 @@ var isDirectorySync = isTypeSync.bind(void 0, "statSync", "isDirectory");
var isSymlinkSync = isTypeSync.bind(void 0, "lstatSync", "isSymbolicLink");
// node_modules/unicorn-magic/node.js
var import_node_util2 = require("node:util");
var import_node_child_process2 = require("node:child_process");
var import_node_util = require("node:util");
var import_node_child_process = require("node:child_process");
var import_node_url = require("node:url");
var execFileOriginal = (0, import_node_util2.promisify)(import_node_child_process2.execFile);
var execFileOriginal = (0, import_node_util.promisify)(import_node_child_process.execFile);
function toPath(urlOrPath) {
return urlOrPath instanceof URL ? (0, import_node_url.fileURLToPath)(urlOrPath) : urlOrPath;
}
var TEN_MEGABYTES_IN_BYTES = 10 * 1024 * 1024;
// node_modules/globby/ignore.js
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_process = __toESM(require("node:process"), 1);
var import_node_fs2 = __toESM(require("node:fs"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_node_path = __toESM(require("node:path"), 1);
var import_fast_glob = __toESM(require_out4(), 1);
var import_ignore = __toESM(require_ignore(), 1);
@@ -83671,16 +83536,16 @@ var ignoreFilesGlobOptions = {
dot: true
};
var GITIGNORE_FILES_PATTERN = "**/.gitignore";
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path2.default.posix.join(base, pattern.slice(1)) : import_node_path2.default.posix.join(base, pattern);
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path.default.posix.join(base, pattern.slice(1)) : import_node_path.default.posix.join(base, pattern);
var parseIgnoreFile = (file, cwd) => {
const base = slash(import_node_path2.default.relative(cwd, import_node_path2.default.dirname(file.filePath)));
const base = slash(import_node_path.default.relative(cwd, import_node_path.default.dirname(file.filePath)));
return file.content.split(/\r?\n/).filter((line) => line && !line.startsWith("#")).map((pattern) => applyBaseToPattern(pattern, base));
};
var toRelativePath = (fileOrDirectory, cwd) => {
cwd = slash(cwd);
if (import_node_path2.default.isAbsolute(fileOrDirectory)) {
if (import_node_path.default.isAbsolute(fileOrDirectory)) {
if (slash(fileOrDirectory).startsWith(cwd)) {
return import_node_path2.default.relative(cwd, fileOrDirectory);
return import_node_path.default.relative(cwd, fileOrDirectory);
}
throw new Error(`Path ${fileOrDirectory} is not in cwd ${cwd}`);
}
@@ -83696,7 +83561,7 @@ var getIsIgnoredPredicate = (files, cwd) => {
};
};
var normalizeOptions = (options = {}) => ({
cwd: toPath(options.cwd) ?? import_node_process2.default.cwd(),
cwd: toPath(options.cwd) ?? import_node_process.default.cwd(),
suppressErrors: Boolean(options.suppressErrors),
deep: typeof options.deep === "number" ? options.deep : Number.POSITIVE_INFINITY,
ignore: [...options.ignore ?? [], ...defaultIgnoredDirectories]
@@ -83713,7 +83578,7 @@ var isIgnoredByIgnoreFiles = async (patterns, options) => {
const files = await Promise.all(
paths.map(async (filePath) => ({
filePath,
content: await import_promises4.default.readFile(filePath, "utf8")
content: await import_promises3.default.readFile(filePath, "utf8")
}))
);
return getIsIgnoredPredicate(files, cwd);
@@ -83742,14 +83607,14 @@ var assertPatternsInput = (patterns) => {
};
var normalizePathForDirectoryGlob = (filePath, cwd) => {
const path20 = isNegativePattern(filePath) ? filePath.slice(1) : filePath;
return import_node_path3.default.isAbsolute(path20) ? path20 : import_node_path3.default.join(cwd, path20);
return import_node_path2.default.isAbsolute(path20) ? path20 : import_node_path2.default.join(cwd, path20);
};
var getDirectoryGlob = ({ directoryPath, files, extensions }) => {
const extensionGlob = extensions?.length > 0 ? `.${extensions.length > 1 ? `{${extensions.join(",")}}` : extensions[0]}` : "";
return files ? files.map((file) => import_node_path3.default.posix.join(directoryPath, `**/${import_node_path3.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path3.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
return files ? files.map((file) => import_node_path2.default.posix.join(directoryPath, `**/${import_node_path2.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path2.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
};
var directoryToGlob = async (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => {
@@ -83759,7 +83624,7 @@ var directoryToGlob = async (directoryPaths, {
return globs.flat();
};
var directoryToGlobSync = (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => directoryPaths.flatMap((directoryPath) => isDirectorySync(normalizePathForDirectoryGlob(directoryPath, cwd)) ? getDirectoryGlob({ directoryPath, files, extensions }) : directoryPath);
@@ -83817,7 +83682,7 @@ var getFilterSync = (options) => {
var createFilterFunction = (isIgnored) => {
const seen = /* @__PURE__ */ new Set();
return (fastGlobResult) => {
const pathKey = import_node_path3.default.normalize(fastGlobResult.path ?? fastGlobResult);
const pathKey = import_node_path2.default.normalize(fastGlobResult.path ?? fastGlobResult);
if (seen.has(pathKey) || isIgnored && isIgnored(pathKey)) {
return false;
}
@@ -83928,12 +83793,12 @@ var { convertPathToPattern } = import_fast_glob2.default;
var import_is_glob = __toESM(require_is_glob(), 1);
// node_modules/is-path-cwd/index.js
var import_node_process4 = __toESM(require("node:process"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
function isPathCwd(path_) {
let cwd = import_node_process4.default.cwd();
path_ = import_node_path4.default.resolve(path_);
if (import_node_process4.default.platform === "win32") {
let cwd = import_node_process3.default.cwd();
path_ = import_node_path3.default.resolve(path_);
if (import_node_process3.default.platform === "win32") {
cwd = cwd.toLowerCase();
path_ = path_.toLowerCase();
}
@@ -83941,11 +83806,11 @@ function isPathCwd(path_) {
}
// node_modules/del/node_modules/is-path-inside/index.js
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
function isPathInside(childPath, parentPath) {
const relation = import_node_path5.default.relative(parentPath, childPath);
const relation = import_node_path4.default.relative(parentPath, childPath);
return Boolean(
relation && relation !== ".." && !relation.startsWith(`..${import_node_path5.default.sep}`) && relation !== import_node_path5.default.resolve(childPath)
relation && relation !== ".." && !relation.startsWith(`..${import_node_path4.default.sep}`) && relation !== import_node_path4.default.resolve(childPath)
);
}
@@ -84084,14 +83949,14 @@ function safeCheck(file, cwd) {
function normalizePatterns(patterns) {
patterns = Array.isArray(patterns) ? patterns : [patterns];
patterns = patterns.map((pattern) => {
if (import_node_process5.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
if (import_node_process4.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
return slash(pattern);
}
return pattern;
});
return patterns;
}
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5.default.cwd(), onProgress = () => {
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process4.default.cwd(), onProgress = () => {
}, ...options } = {}) {
options = {
expandDirectories: false,
@@ -84112,12 +83977,12 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
let deletedCount = 0;
const mapper = async (file) => {
file = import_node_path6.default.resolve(cwd, file);
file = import_node_path5.default.resolve(cwd, file);
if (!force) {
safeCheck(file, cwd);
}
if (!dryRun) {
await import_promises5.default.rm(file, { recursive: true, force: true });
await import_promises4.default.rm(file, { recursive: true, force: true });
}
deletedCount += 1;
onProgress({
@@ -84134,7 +83999,7 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
// node_modules/get-folder-size/index.js
var import_node_path7 = require("node:path");
var import_node_path6 = require("node:path");
async function getFolderSize(itemPath, options) {
return await core(itemPath, options, { errors: true });
}
@@ -84159,7 +84024,7 @@ async function core(rootItemPath, options = {}, returnType = {}) {
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
(directoryItem) => processItem((0, import_node_path7.join)(itemPath, directoryItem))
(directoryItem) => processItem((0, import_node_path6.join)(itemPath, directoryItem))
)
);
}
@@ -86801,8 +86666,8 @@ function getExtraOptionsEnvParam() {
);
}
}
function getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, platform3) {
const fixedAmount = 1024 * (platform3 === "win32" ? 1.5 : 1);
function getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, platform2) {
const fixedAmount = 1024 * (platform2 === "win32" ? 1.5 : 1);
const scaledAmount = getReservedRamScaleFactor() * Math.max(totalMemoryMegaBytes - 8 * 1024, 0);
return fixedAmount + scaledAmount;
}
@@ -86816,7 +86681,7 @@ function getReservedRamScaleFactor() {
}
return envVar / 100;
}
function getMemoryFlagValueForPlatform(userInput, totalMemoryBytes, platform3) {
function getMemoryFlagValueForPlatform(userInput, totalMemoryBytes, platform2) {
let memoryToUseMegaBytes;
if (userInput) {
memoryToUseMegaBytes = Number(userInput);
@@ -86829,7 +86694,7 @@ function getMemoryFlagValueForPlatform(userInput, totalMemoryBytes, platform3) {
const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
const reservedMemoryMegaBytes = getSystemReservedMemoryMegaBytes(
totalMemoryMegaBytes,
platform3
platform2
);
memoryToUseMegaBytes = totalMemoryMegaBytes - reservedMemoryMegaBytes;
}
@@ -87196,16 +87061,14 @@ function prettyPrintPack(pack) {
}
async function checkDiskUsage(logger) {
try {
if (process.platform === "darwin" && (process.arch === "arm" || process.arch === "arm64") && !await checkSipEnablement(logger)) {
return void 0;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises4.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE")
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const message = `The Actions runner is running low on disk space (${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = 1024 * 1024 / blockSizeInBytes;
const numBlocksPerGb = 1024 * 1024 * 1024 / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message = `The Actions runner is running low on disk space (${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env["CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */] !== "true") {
logger.warning(message);
} else {
@@ -87214,8 +87077,8 @@ async function checkDiskUsage(logger) {
core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes
};
} catch (error2) {
logger.warning(
@@ -87255,34 +87118,6 @@ var BuildMode = /* @__PURE__ */ ((BuildMode3) => {
function cloneObject(obj) {
return JSON.parse(JSON.stringify(obj));
}
async function checkSipEnablement(logger) {
if (process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] !== void 0 && ["true", "false"].includes(process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */])) {
return process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] === "true";
}
try {
const sipStatusOutput = await exec.getExecOutput("csrutil status");
if (sipStatusOutput.exitCode === 0) {
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: enabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
return true;
}
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: disabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
return false;
}
}
return void 0;
} catch (e) {
logger.warning(
`Failed to determine if System Integrity Protection was enabled: ${e}`
);
return void 0;
}
}
async function cleanUpGlob(glob2, name, logger) {
logger.debug(`Cleaning up ${name}.`);
try {
@@ -90143,7 +89978,7 @@ function getCliConfigCategoryIfExists(cliError) {
}
function isUnsupportedPlatform() {
return !SUPPORTED_PLATFORMS.some(
([platform3, arch2]) => platform3 === process.platform && arch2 === process.arch
([platform2, arch2]) => platform2 === process.platform && arch2 === process.arch
);
}
function getUnsupportedPlatformError(cliError) {
@@ -90505,17 +90340,17 @@ function getCodeQLBundleExtension(compressionMethod) {
}
function getCodeQLBundleName(compressionMethod) {
const extension = getCodeQLBundleExtension(compressionMethod);
let platform3;
let platform2;
if (process.platform === "win32") {
platform3 = "win64";
platform2 = "win64";
} else if (process.platform === "linux") {
platform3 = "linux64";
platform2 = "linux64";
} else if (process.platform === "darwin") {
platform3 = "osx64";
platform2 = "osx64";
} else {
return `codeql-bundle${extension}`;
}
return `codeql-bundle-${platform3}${extension}`;
return `codeql-bundle-${platform2}${extension}`;
}
function getCodeQLActionRepository(logger) {
if (isRunningLocalAction()) {
@@ -90549,12 +90384,12 @@ async function getCodeQLBundleDownloadURL(tagName, apiDetails, compressionMethod
}
const [repositoryOwner, repositoryName] = repository.split("/");
try {
const release3 = await getApiClient().rest.repos.getReleaseByTag({
const release2 = await getApiClient().rest.repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: tagName
});
for (const asset of release3.data.assets) {
for (const asset of release2.data.assets) {
if (asset.name === codeQLBundleName) {
logger.info(
`Found CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} with URL ${asset.url}.`
@@ -90994,14 +90829,14 @@ async function getNightlyToolsUrl(logger) {
zstdAvailability.available
) ? "zstd" : "gzip";
try {
const release3 = await getApiClient().rest.repos.listReleases({
const release2 = await getApiClient().rest.repos.listReleases({
owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER,
repo: CODEQL_NIGHTLIES_REPOSITORY_NAME,
per_page: 1,
page: 1,
prerelease: true
});
const latestRelease = release3.data[0];
const latestRelease = release2.data[0];
if (!latestRelease) {
throw new Error("Could not find the latest nightly release.");
}
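
The checkDiskUsage hunk at the top of this file swaps the vendored check-disk-space helper for Node's built-in fs.promises.statfs, which reports sizes in filesystem blocks rather than bytes. Below is a minimal sketch of the same approach — not the action's exact code — assuming Node.js 18.15 or later (where statfs is available) and that GITHUB_WORKSPACE names the directory to measure:

```js
// Minimal sketch: free-space check via fs.promises.statfs (Node.js >= 18.15).
// statfs reports counts in filesystem blocks, so convert to bytes via the block size.
const fsPromises = require("fs/promises");

async function checkDiskUsage(dirPath) {
  const stats = await fsPromises.statfs(dirPath);
  const blockSizeInBytes = stats.bsize; // bytes per block
  return {
    numAvailableBytes: stats.bavail * blockSizeInBytes, // blocks available to unprivileged users
    numTotalBytes: stats.blocks * blockSizeInBytes,     // total data blocks on the volume
  };
}

// Usage sketch: warn once the workspace drops below 2 GiB of free space.
async function main() {
  const mbInBytes = 1024 * 1024;
  const gbInBytes = 1024 * 1024 * 1024;
  const { numAvailableBytes } = await checkDiskUsage(process.env.GITHUB_WORKSPACE ?? ".");
  if (numAvailableBytes < 2 * gbInBytes) {
    console.warn(
      `The Actions runner is running low on disk space ` +
        `(${(numAvailableBytes / mbInBytes).toPrecision(4)} MB available).`
    );
  }
}

main().catch((e) => console.error(e));
```

The bundled code performs the equivalent comparison in block units (bavail < 2 * numBlocksPerGb); converting to bytes first, as above, yields the same 2 GB threshold.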

View File

@@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({
var require_mock_client = __commonJS({
"node_modules/undici/lib/mock/mock-client.js"(exports2, module2) {
"use strict";
var { promisify: promisify2 } = require("util");
var { promisify } = require("util");
var Client = require_client();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify2(this[kOriginalClose])();
await promisify(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({
var require_mock_pool = __commonJS({
"node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) {
"use strict";
var { promisify: promisify2 } = require("util");
var { promisify } = require("util");
var Pool = require_pool();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify2(this[kOriginalClose])();
await promisify(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -18148,7 +18148,7 @@ var require_summary = __commonJS({
exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
var os_1 = require("os");
var fs_1 = require("fs");
var { access: access2, appendFile, writeFile } = fs_1.promises;
var { access, appendFile, writeFile } = fs_1.promises;
exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
var Summary = class {
@@ -18171,7 +18171,7 @@ var require_summary = __commonJS({
throw new Error(`Unable to find environment variable for $${exports2.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access2(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
} catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
@@ -24862,8 +24862,8 @@ var require_semver = __commonJS({
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release3, identifier, identifierBase) {
if (release3.startsWith("pre")) {
inc(release2, identifier, identifierBase) {
if (release2.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
@@ -24874,7 +24874,7 @@ var require_semver = __commonJS({
}
}
}
switch (release3) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -24965,7 +24965,7 @@ var require_semver = __commonJS({
break;
}
default:
throw new Error(`invalid increment argument: ${release3}`);
throw new Error(`invalid increment argument: ${release2}`);
}
this.raw = this.format();
if (this.build.length) {
@@ -25031,7 +25031,7 @@ var require_inc = __commonJS({
"node_modules/semver/functions/inc.js"(exports2, module2) {
"use strict";
var SemVer = require_semver();
var inc = (version, release3, options, identifier, identifierBase) => {
var inc = (version, release2, options, identifier, identifierBase) => {
if (typeof options === "string") {
identifierBase = identifier;
identifier = options;
@@ -25041,7 +25041,7 @@ var require_inc = __commonJS({
return new SemVer(
version instanceof SemVer ? version.version : version,
options
).inc(release3, identifier, identifierBase).version;
).inc(release2, identifier, identifierBase).version;
} catch (er) {
return null;
}
@@ -26498,7 +26498,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -31437,8 +31436,8 @@ var require_semver3 = __commonJS({
}
} while (++i2);
};
SemVer.prototype.inc = function(release3, identifier) {
switch (release3) {
SemVer.prototype.inc = function(release2, identifier) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -31514,20 +31513,20 @@ var require_semver3 = __commonJS({
}
break;
default:
throw new Error("invalid increment argument: " + release3);
throw new Error("invalid increment argument: " + release2);
}
this.format();
this.raw = this.version;
return this;
};
exports2.inc = inc;
function inc(version, release3, loose, identifier) {
function inc(version, release2, loose, identifier) {
if (typeof loose === "string") {
identifier = loose;
loose = void 0;
}
try {
return new SemVer(version, loose).inc(release3, identifier).version;
return new SemVer(version, loose).inc(release2, identifier).version;
} catch (er) {
return null;
}
@@ -55009,7 +55008,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
};
var access2 = {
var access = {
parameterPath: ["options", "access"],
mapper: {
serializedName: "x-ms-blob-public-access",
@@ -56817,7 +56816,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
requestId,
accept1,
metadata,
access2,
access,
defaultEncryptionScope,
preventEncryptionScopeOverride
],
@@ -56964,7 +56963,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
accept,
version,
requestId,
access2,
access,
leaseId,
ifModifiedSince,
ifUnmodifiedSince
@@ -65810,7 +65809,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
* @param containerAcl - Array of elements each having a unique Id and details of the access policy.
* @param options - Options to Container Set Access Policy operation.
*/
async setAccessPolicy(access3, containerAcl2, options = {}) {
async setAccessPolicy(access2, containerAcl2, options = {}) {
options.conditions = options.conditions || {};
return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
const acl = [];
@@ -65826,7 +65825,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
return assertResponse(await this.containerContext.setAccessPolicy({
abortSignal: options.abortSignal,
access: access3,
access: access2,
containerAcl: acl,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: options.conditions,
@@ -76104,148 +76103,14 @@ var github = __toESM(require_github());
var io2 = __toESM(require_io());
// src/util.ts
var fsPromises = __toESM(require("fs/promises"));
var path = __toESM(require("path"));
var core3 = __toESM(require_core());
var exec = __toESM(require_exec());
var io = __toESM(require_io());
// node_modules/check-disk-space/dist/check-disk-space.mjs
var import_node_child_process = require("node:child_process");
var import_promises = require("node:fs/promises");
var import_node_os = require("node:os");
var import_node_path = require("node:path");
var import_node_process = require("node:process");
var import_node_util = require("node:util");
var InvalidPathError = class _InvalidPathError extends Error {
constructor(message) {
super(message);
this.name = "InvalidPathError";
Object.setPrototypeOf(this, _InvalidPathError.prototype);
}
};
var NoMatchError = class _NoMatchError extends Error {
constructor(message) {
super(message);
this.name = "NoMatchError";
Object.setPrototypeOf(this, _NoMatchError.prototype);
}
};
async function isDirectoryExisting(directoryPath, dependencies) {
try {
await dependencies.fsAccess(directoryPath);
return Promise.resolve(true);
} catch (error2) {
return Promise.resolve(false);
}
}
async function getFirstExistingParentPath(directoryPath, dependencies) {
let parentDirectoryPath = directoryPath;
let parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
while (!parentDirectoryFound) {
parentDirectoryPath = dependencies.pathNormalize(parentDirectoryPath + "/..");
parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
}
return parentDirectoryPath;
}
async function hasPowerShell3(dependencies) {
const major = parseInt(dependencies.release.split(".")[0], 10);
if (major <= 6) {
return false;
}
try {
await dependencies.cpExecFile("where", ["powershell"], { windowsHide: true });
return true;
} catch (error2) {
return false;
}
}
function checkDiskSpace(directoryPath, dependencies = {
platform: import_node_process.platform,
release: (0, import_node_os.release)(),
fsAccess: import_promises.access,
pathNormalize: import_node_path.normalize,
pathSep: import_node_path.sep,
cpExecFile: (0, import_node_util.promisify)(import_node_child_process.execFile)
}) {
function mapOutput(stdout, filter, mapping, coefficient) {
const parsed = stdout.split("\n").map((line) => line.trim()).filter((line) => line.length !== 0).slice(1).map((line) => line.split(/\s+(?=[\d/])/));
const filtered = parsed.filter(filter);
if (filtered.length === 0) {
throw new NoMatchError();
}
const diskData = filtered[0];
return {
diskPath: diskData[mapping.diskPath],
free: parseInt(diskData[mapping.free], 10) * coefficient,
size: parseInt(diskData[mapping.size], 10) * coefficient
};
}
async function check(cmd, filter, mapping, coefficient = 1) {
const [file, ...args] = cmd;
if (file === void 0) {
return Promise.reject(new Error("cmd must contain at least one item"));
}
try {
const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
return mapOutput(stdout, filter, mapping, coefficient);
} catch (error2) {
return Promise.reject(error2);
}
}
async function checkWin32(directoryPath2) {
if (directoryPath2.charAt(1) !== ":") {
return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath2}`));
}
const powershellCmd = [
"powershell",
"Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size"
];
const wmicCmd = [
"wmic",
"logicaldisk",
"get",
"size,freespace,caption"
];
const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
return check(cmd, (driveData) => {
const driveLetter = driveData[0];
return directoryPath2.toUpperCase().startsWith(driveLetter.toUpperCase());
}, {
diskPath: 0,
free: 1,
size: 2
});
}
async function checkUnix(directoryPath2) {
if (!dependencies.pathNormalize(directoryPath2).startsWith(dependencies.pathSep)) {
return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath2}`));
}
const pathToCheck = await getFirstExistingParentPath(directoryPath2, dependencies);
return check(
[
"df",
"-Pk",
"--",
pathToCheck
],
() => true,
// We should only get one line, so we did not need to filter
{
diskPath: 5,
free: 3,
size: 1
},
1024
);
}
if (dependencies.platform === "win32") {
return checkWin32(directoryPath);
}
return checkUnix(directoryPath);
}
// node_modules/get-folder-size/index.js
var import_node_path2 = require("node:path");
var import_node_path = require("node:path");
async function getFolderSize(itemPath, options) {
return await core(itemPath, options, { errors: true });
}
@@ -76270,7 +76135,7 @@ async function core(rootItemPath, options = {}, returnType = {}) {
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
(directoryItem) => processItem((0, import_node_path2.join)(itemPath, directoryItem))
(directoryItem) => processItem((0, import_node_path.join)(itemPath, directoryItem))
)
);
}
@@ -79051,16 +78916,14 @@ function getErrorMessage(error2) {
}
async function checkDiskUsage(logger) {
try {
if (process.platform === "darwin" && (process.arch === "arm" || process.arch === "arm64") && !await checkSipEnablement(logger)) {
return void 0;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE")
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const message = `The Actions runner is running low on disk space (${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = 1024 * 1024 / blockSizeInBytes;
const numBlocksPerGb = 1024 * 1024 * 1024 / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message = `The Actions runner is running low on disk space (${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env["CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */] !== "true") {
logger.warning(message);
} else {
@@ -79069,8 +78932,8 @@ async function checkDiskUsage(logger) {
core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes
};
} catch (error2) {
logger.warning(
@@ -79096,34 +78959,6 @@ function checkActionVersion(version, githubVersion) {
function cloneObject(obj) {
return JSON.parse(JSON.stringify(obj));
}
async function checkSipEnablement(logger) {
if (process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] !== void 0 && ["true", "false"].includes(process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */])) {
return process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] === "true";
}
try {
const sipStatusOutput = await exec.getExecOutput("csrutil status");
if (sipStatusOutput.exitCode === 0) {
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: enabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
return true;
}
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: disabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
return false;
}
}
return void 0;
} catch (e) {
logger.warning(
`Failed to determine if System Integrity Protection was enabled: ${e}`
);
return void 0;
}
}
async function asyncSome(array, predicate) {
const results = await Promise.all(array.map(predicate));
return results.some((result) => result);
@@ -79578,7 +79413,7 @@ function getCliConfigCategoryIfExists(cliError) {
}
function isUnsupportedPlatform() {
return !SUPPORTED_PLATFORMS.some(
([platform2, arch]) => platform2 === process.platform && arch === process.arch
([platform, arch]) => platform === process.platform && arch === process.arch
);
}
function getUnsupportedPlatformError(cliError) {
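
This file also drops checkSipEnablement, the macOS-only gate that previously made checkDiskUsage return early on Apple Silicon runners unless System Integrity Protection was confirmed enabled. For reference, here is a condensed sketch of the removed pattern — run `csrutil status` once and memoize the answer in an exported environment variable so later steps skip the subprocess. It assumes @actions/core and @actions/exec, the packages used throughout these bundles:

```js
// Condensed sketch of the removed helper: query SIP status once via `csrutil status`
// and cache the result in an exported environment variable for subsequent steps.
const core = require("@actions/core");
const exec = require("@actions/exec");

const IS_SIP_ENABLED = "CODEQL_ACTION_IS_SIP_ENABLED";

async function checkSipEnablement(logger) {
  const cached = process.env[IS_SIP_ENABLED];
  if (cached === "true" || cached === "false") {
    return cached === "true";
  }
  try {
    const output = await exec.getExecOutput("csrutil status");
    if (output.exitCode === 0) {
      if (output.stdout.includes("System Integrity Protection status: enabled.")) {
        core.exportVariable(IS_SIP_ENABLED, "true");
        return true;
      }
      if (output.stdout.includes("System Integrity Protection status: disabled.")) {
        core.exportVariable(IS_SIP_ENABLED, "false");
        return false;
      }
    }
    return undefined; // status could not be determined
  } catch (e) {
    logger.warning(`Failed to determine if System Integrity Protection was enabled: ${e}`);
    return undefined;
  }
}

// e.g. checkSipEnablement({ warning: console.warn }).then((enabled) => console.log(enabled));
```

With the gate removed, the disk usage check now runs unconditionally on Apple Silicon macOS runners as well.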

View File

@@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({
var require_mock_client = __commonJS({
"node_modules/undici/lib/mock/mock-client.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Client = require_client();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({
var require_mock_pool = __commonJS({
"node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Pool = require_pool();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -18148,7 +18148,7 @@ var require_summary = __commonJS({
exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
var os_1 = require("os");
var fs_1 = require("fs");
var { access: access2, appendFile, writeFile } = fs_1.promises;
var { access, appendFile, writeFile } = fs_1.promises;
exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
var Summary = class {
@@ -18171,7 +18171,7 @@ var require_summary = __commonJS({
throw new Error(`Unable to find environment variable for $${exports2.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access2(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
} catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
@@ -28569,13 +28569,13 @@ var require_reusify = __commonJS({
current.next = null;
return current;
}
function release3(obj) {
function release2(obj) {
tail.next = obj;
tail = obj;
}
return {
get,
release: release3
release: release2
};
}
module2.exports = reusify;
@@ -28644,7 +28644,7 @@ var require_queue = __commonJS({
self2.paused = false;
for (var i = 0; i < self2.concurrency; i++) {
_running++;
release3();
release2();
}
}
function idle() {
@@ -28653,7 +28653,7 @@ var require_queue = __commonJS({
function push(value, done) {
var current = cache.get();
current.context = context2;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -28673,7 +28673,7 @@ var require_queue = __commonJS({
function unshift(value, done) {
var current = cache.get();
current.context = context2;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -28690,7 +28690,7 @@ var require_queue = __commonJS({
worker.call(context2, current.value, current.worked);
}
}
function release3(holder) {
function release2(holder) {
if (holder) {
cache.release(holder);
}
@@ -30711,8 +30711,8 @@ var require_semver = __commonJS({
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release3, identifier, identifierBase) {
if (release3.startsWith("pre")) {
inc(release2, identifier, identifierBase) {
if (release2.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
@@ -30723,7 +30723,7 @@ var require_semver = __commonJS({
}
}
}
switch (release3) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -30814,7 +30814,7 @@ var require_semver = __commonJS({
break;
}
default:
throw new Error(`invalid increment argument: ${release3}`);
throw new Error(`invalid increment argument: ${release2}`);
}
this.raw = this.format();
if (this.build.length) {
@@ -30880,7 +30880,7 @@ var require_inc = __commonJS({
"node_modules/semver/functions/inc.js"(exports2, module2) {
"use strict";
var SemVer = require_semver();
var inc = (version, release3, options, identifier, identifierBase) => {
var inc = (version, release2, options, identifier, identifierBase) => {
if (typeof options === "string") {
identifierBase = identifier;
identifier = options;
@@ -30890,7 +30890,7 @@ var require_inc = __commonJS({
return new SemVer(
version instanceof SemVer ? version.version : version,
options
).inc(release3, identifier, identifierBase).version;
).inc(release2, identifier, identifierBase).version;
} catch (er) {
return null;
}
@@ -32347,7 +32347,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -35989,8 +35988,8 @@ var require_semver3 = __commonJS({
}
} while (++i2);
};
SemVer.prototype.inc = function(release3, identifier) {
switch (release3) {
SemVer.prototype.inc = function(release2, identifier) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -36066,20 +36065,20 @@ var require_semver3 = __commonJS({
}
break;
default:
throw new Error("invalid increment argument: " + release3);
throw new Error("invalid increment argument: " + release2);
}
this.format();
this.raw = this.version;
return this;
};
exports2.inc = inc;
function inc(version, release3, loose, identifier) {
function inc(version, release2, loose, identifier) {
if (typeof loose === "string") {
identifier = loose;
loose = void 0;
}
try {
return new SemVer(version, loose).inc(release3, identifier).version;
return new SemVer(version, loose).inc(release2, identifier).version;
} catch (er) {
return null;
}
@@ -59561,7 +59560,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
};
var access2 = {
var access = {
parameterPath: ["options", "access"],
mapper: {
serializedName: "x-ms-blob-public-access",
@@ -61369,7 +61368,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
requestId,
accept1,
metadata,
access2,
access,
defaultEncryptionScope,
preventEncryptionScopeOverride
],
@@ -61516,7 +61515,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
accept,
version,
requestId,
access2,
access,
leaseId,
ifModifiedSince,
ifUnmodifiedSince
@@ -70362,7 +70361,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
* @param containerAcl - Array of elements each having a unique Id and details of the access policy.
* @param options - Options to Container Set Access Policy operation.
*/
async setAccessPolicy(access3, containerAcl2, options = {}) {
async setAccessPolicy(access2, containerAcl2, options = {}) {
options.conditions = options.conditions || {};
return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
const acl = [];
@@ -70378,7 +70377,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
return assertResponse(await this.containerContext.setAccessPolicy({
abortSignal: options.abortSignal,
access: access3,
access: access2,
containerAcl: acl,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: options.conditions,
@@ -82008,160 +82007,26 @@ var github = __toESM(require_github());
var io2 = __toESM(require_io());
// src/util.ts
var fsPromises4 = __toESM(require("fs/promises"));
var path5 = __toESM(require("path"));
var core3 = __toESM(require_core());
var exec = __toESM(require_exec());
var io = __toESM(require_io());
// node_modules/check-disk-space/dist/check-disk-space.mjs
var import_node_child_process = require("node:child_process");
var import_promises = require("node:fs/promises");
var import_node_os = require("node:os");
var import_node_path = require("node:path");
var import_node_process = require("node:process");
var import_node_util = require("node:util");
var InvalidPathError = class _InvalidPathError extends Error {
constructor(message) {
super(message);
this.name = "InvalidPathError";
Object.setPrototypeOf(this, _InvalidPathError.prototype);
}
};
var NoMatchError = class _NoMatchError extends Error {
constructor(message) {
super(message);
this.name = "NoMatchError";
Object.setPrototypeOf(this, _NoMatchError.prototype);
}
};
async function isDirectoryExisting(directoryPath, dependencies) {
try {
await dependencies.fsAccess(directoryPath);
return Promise.resolve(true);
} catch (error2) {
return Promise.resolve(false);
}
}
async function getFirstExistingParentPath(directoryPath, dependencies) {
let parentDirectoryPath = directoryPath;
let parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
while (!parentDirectoryFound) {
parentDirectoryPath = dependencies.pathNormalize(parentDirectoryPath + "/..");
parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
}
return parentDirectoryPath;
}
async function hasPowerShell3(dependencies) {
const major = parseInt(dependencies.release.split(".")[0], 10);
if (major <= 6) {
return false;
}
try {
await dependencies.cpExecFile("where", ["powershell"], { windowsHide: true });
return true;
} catch (error2) {
return false;
}
}
function checkDiskSpace(directoryPath, dependencies = {
platform: import_node_process.platform,
release: (0, import_node_os.release)(),
fsAccess: import_promises.access,
pathNormalize: import_node_path.normalize,
pathSep: import_node_path.sep,
cpExecFile: (0, import_node_util.promisify)(import_node_child_process.execFile)
}) {
function mapOutput(stdout, filter, mapping, coefficient) {
const parsed = stdout.split("\n").map((line) => line.trim()).filter((line) => line.length !== 0).slice(1).map((line) => line.split(/\s+(?=[\d/])/));
const filtered = parsed.filter(filter);
if (filtered.length === 0) {
throw new NoMatchError();
}
const diskData = filtered[0];
return {
diskPath: diskData[mapping.diskPath],
free: parseInt(diskData[mapping.free], 10) * coefficient,
size: parseInt(diskData[mapping.size], 10) * coefficient
};
}
async function check(cmd, filter, mapping, coefficient = 1) {
const [file, ...args] = cmd;
if (file === void 0) {
return Promise.reject(new Error("cmd must contain at least one item"));
}
try {
const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
return mapOutput(stdout, filter, mapping, coefficient);
} catch (error2) {
return Promise.reject(error2);
}
}
async function checkWin32(directoryPath2) {
if (directoryPath2.charAt(1) !== ":") {
return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath2}`));
}
const powershellCmd = [
"powershell",
"Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size"
];
const wmicCmd = [
"wmic",
"logicaldisk",
"get",
"size,freespace,caption"
];
const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
return check(cmd, (driveData) => {
const driveLetter = driveData[0];
return directoryPath2.toUpperCase().startsWith(driveLetter.toUpperCase());
}, {
diskPath: 0,
free: 1,
size: 2
});
}
async function checkUnix(directoryPath2) {
if (!dependencies.pathNormalize(directoryPath2).startsWith(dependencies.pathSep)) {
return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath2}`));
}
const pathToCheck = await getFirstExistingParentPath(directoryPath2, dependencies);
return check(
[
"df",
"-Pk",
"--",
pathToCheck
],
() => true,
// We should only get one line, so we did not need to filter
{
diskPath: 5,
free: 3,
size: 1
},
1024
);
}
if (dependencies.platform === "win32") {
return checkWin32(directoryPath);
}
return checkUnix(directoryPath);
}
// node_modules/del/index.js
var import_promises5 = __toESM(require("node:fs/promises"), 1);
var import_node_path6 = __toESM(require("node:path"), 1);
var import_node_process5 = __toESM(require("node:process"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_process4 = __toESM(require("node:process"), 1);
// node_modules/globby/index.js
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_fs3 = __toESM(require("node:fs"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
// node_modules/globby/node_modules/@sindresorhus/merge-streams/index.js
var import_node_events = require("node:events");
var import_node_stream = require("node:stream");
var import_promises2 = require("node:stream/promises");
var import_promises = require("node:stream/promises");
function mergeStreams(streams) {
if (!Array.isArray(streams)) {
throw new TypeError(`Expected an array, got \`${typeof streams}\`.`);
@@ -82236,7 +82101,7 @@ var onMergedStreamFinished = async (passThroughStream, streams) => {
}
};
var onMergedStreamEnd = async (passThroughStream, { signal }) => {
await (0, import_promises2.finished)(passThroughStream, { signal, cleanup: true });
await (0, import_promises.finished)(passThroughStream, { signal, cleanup: true });
};
var onInputStreamsUnpipe = async (passThroughStream, streams, { signal }) => {
for await (const [unpipedStream] of (0, import_node_events.on)(passThroughStream, "unpipe", { signal })) {
@@ -82286,7 +82151,7 @@ var afterMergedStreamFinished = async (onFinished, stream2) => {
};
var onInputStreamEnd = async ({ passThroughStream, stream: stream2, streams, ended, aborted, controller: { signal } }) => {
try {
await (0, import_promises2.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
await (0, import_promises.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
if (streams.has(stream2)) {
ended.add(stream2);
}
@@ -82340,13 +82205,13 @@ var import_fast_glob2 = __toESM(require_out4(), 1);
// node_modules/path-type/index.js
var import_node_fs = __toESM(require("node:fs"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_promises2 = __toESM(require("node:fs/promises"), 1);
async function isType(fsStatType, statsMethodName, filePath) {
if (typeof filePath !== "string") {
throw new TypeError(`Expected a string, got ${typeof filePath}`);
}
try {
const stats = await import_promises3.default[fsStatType](filePath);
const stats = await import_promises2.default[fsStatType](filePath);
return stats[statsMethodName]();
} catch (error2) {
if (error2.code === "ENOENT") {
@@ -82376,20 +82241,20 @@ var isDirectorySync = isTypeSync.bind(void 0, "statSync", "isDirectory");
var isSymlinkSync = isTypeSync.bind(void 0, "lstatSync", "isSymbolicLink");
// node_modules/unicorn-magic/node.js
var import_node_util2 = require("node:util");
var import_node_child_process2 = require("node:child_process");
var import_node_util = require("node:util");
var import_node_child_process = require("node:child_process");
var import_node_url = require("node:url");
var execFileOriginal = (0, import_node_util2.promisify)(import_node_child_process2.execFile);
var execFileOriginal = (0, import_node_util.promisify)(import_node_child_process.execFile);
function toPath(urlOrPath) {
return urlOrPath instanceof URL ? (0, import_node_url.fileURLToPath)(urlOrPath) : urlOrPath;
}
var TEN_MEGABYTES_IN_BYTES = 10 * 1024 * 1024;
// node_modules/globby/ignore.js
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_process = __toESM(require("node:process"), 1);
var import_node_fs2 = __toESM(require("node:fs"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_node_path = __toESM(require("node:path"), 1);
var import_fast_glob = __toESM(require_out4(), 1);
var import_ignore = __toESM(require_ignore(), 1);
@@ -82417,16 +82282,16 @@ var ignoreFilesGlobOptions = {
dot: true
};
var GITIGNORE_FILES_PATTERN = "**/.gitignore";
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path2.default.posix.join(base, pattern.slice(1)) : import_node_path2.default.posix.join(base, pattern);
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path.default.posix.join(base, pattern.slice(1)) : import_node_path.default.posix.join(base, pattern);
var parseIgnoreFile = (file, cwd) => {
const base = slash(import_node_path2.default.relative(cwd, import_node_path2.default.dirname(file.filePath)));
const base = slash(import_node_path.default.relative(cwd, import_node_path.default.dirname(file.filePath)));
return file.content.split(/\r?\n/).filter((line) => line && !line.startsWith("#")).map((pattern) => applyBaseToPattern(pattern, base));
};
var toRelativePath = (fileOrDirectory, cwd) => {
cwd = slash(cwd);
if (import_node_path2.default.isAbsolute(fileOrDirectory)) {
if (import_node_path.default.isAbsolute(fileOrDirectory)) {
if (slash(fileOrDirectory).startsWith(cwd)) {
return import_node_path2.default.relative(cwd, fileOrDirectory);
return import_node_path.default.relative(cwd, fileOrDirectory);
}
throw new Error(`Path ${fileOrDirectory} is not in cwd ${cwd}`);
}
@@ -82442,7 +82307,7 @@ var getIsIgnoredPredicate = (files, cwd) => {
};
};
var normalizeOptions = (options = {}) => ({
cwd: toPath(options.cwd) ?? import_node_process2.default.cwd(),
cwd: toPath(options.cwd) ?? import_node_process.default.cwd(),
suppressErrors: Boolean(options.suppressErrors),
deep: typeof options.deep === "number" ? options.deep : Number.POSITIVE_INFINITY,
ignore: [...options.ignore ?? [], ...defaultIgnoredDirectories]
@@ -82459,7 +82324,7 @@ var isIgnoredByIgnoreFiles = async (patterns, options) => {
const files = await Promise.all(
paths.map(async (filePath) => ({
filePath,
content: await import_promises4.default.readFile(filePath, "utf8")
content: await import_promises3.default.readFile(filePath, "utf8")
}))
);
return getIsIgnoredPredicate(files, cwd);
@@ -82488,14 +82353,14 @@ var assertPatternsInput = (patterns) => {
};
var normalizePathForDirectoryGlob = (filePath, cwd) => {
const path12 = isNegativePattern(filePath) ? filePath.slice(1) : filePath;
return import_node_path3.default.isAbsolute(path12) ? path12 : import_node_path3.default.join(cwd, path12);
return import_node_path2.default.isAbsolute(path12) ? path12 : import_node_path2.default.join(cwd, path12);
};
var getDirectoryGlob = ({ directoryPath, files, extensions }) => {
const extensionGlob = extensions?.length > 0 ? `.${extensions.length > 1 ? `{${extensions.join(",")}}` : extensions[0]}` : "";
return files ? files.map((file) => import_node_path3.default.posix.join(directoryPath, `**/${import_node_path3.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path3.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
return files ? files.map((file) => import_node_path2.default.posix.join(directoryPath, `**/${import_node_path2.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path2.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
};
var directoryToGlob = async (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => {
@@ -82505,7 +82370,7 @@ var directoryToGlob = async (directoryPaths, {
return globs.flat();
};
var directoryToGlobSync = (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => directoryPaths.flatMap((directoryPath) => isDirectorySync(normalizePathForDirectoryGlob(directoryPath, cwd)) ? getDirectoryGlob({ directoryPath, files, extensions }) : directoryPath);
@@ -82563,7 +82428,7 @@ var getFilterSync = (options) => {
var createFilterFunction = (isIgnored) => {
const seen = /* @__PURE__ */ new Set();
return (fastGlobResult) => {
const pathKey = import_node_path3.default.normalize(fastGlobResult.path ?? fastGlobResult);
const pathKey = import_node_path2.default.normalize(fastGlobResult.path ?? fastGlobResult);
if (seen.has(pathKey) || isIgnored && isIgnored(pathKey)) {
return false;
}
@@ -82674,12 +82539,12 @@ var { convertPathToPattern } = import_fast_glob2.default;
var import_is_glob = __toESM(require_is_glob(), 1);
// node_modules/is-path-cwd/index.js
var import_node_process4 = __toESM(require("node:process"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
function isPathCwd(path_) {
let cwd = import_node_process4.default.cwd();
path_ = import_node_path4.default.resolve(path_);
if (import_node_process4.default.platform === "win32") {
let cwd = import_node_process3.default.cwd();
path_ = import_node_path3.default.resolve(path_);
if (import_node_process3.default.platform === "win32") {
cwd = cwd.toLowerCase();
path_ = path_.toLowerCase();
}
@@ -82687,11 +82552,11 @@ function isPathCwd(path_) {
}
// node_modules/del/node_modules/is-path-inside/index.js
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
function isPathInside(childPath, parentPath) {
const relation = import_node_path5.default.relative(parentPath, childPath);
const relation = import_node_path4.default.relative(parentPath, childPath);
return Boolean(
relation && relation !== ".." && !relation.startsWith(`..${import_node_path5.default.sep}`) && relation !== import_node_path5.default.resolve(childPath)
relation && relation !== ".." && !relation.startsWith(`..${import_node_path4.default.sep}`) && relation !== import_node_path4.default.resolve(childPath)
);
}
@@ -82830,14 +82695,14 @@ function safeCheck(file, cwd) {
function normalizePatterns(patterns) {
patterns = Array.isArray(patterns) ? patterns : [patterns];
patterns = patterns.map((pattern) => {
if (import_node_process5.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
if (import_node_process4.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
return slash(pattern);
}
return pattern;
});
return patterns;
}
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5.default.cwd(), onProgress = () => {
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process4.default.cwd(), onProgress = () => {
}, ...options } = {}) {
options = {
expandDirectories: false,
@@ -82858,12 +82723,12 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
let deletedCount = 0;
const mapper = async (file) => {
file = import_node_path6.default.resolve(cwd, file);
file = import_node_path5.default.resolve(cwd, file);
if (!force) {
safeCheck(file, cwd);
}
if (!dryRun) {
await import_promises5.default.rm(file, { recursive: true, force: true });
await import_promises4.default.rm(file, { recursive: true, force: true });
}
deletedCount += 1;
onProgress({
@@ -82880,7 +82745,7 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
// node_modules/get-folder-size/index.js
var import_node_path7 = require("node:path");
var import_node_path6 = require("node:path");
async function getFolderSize(itemPath, options) {
return await core(itemPath, options, { errors: true });
}
@@ -82905,7 +82770,7 @@ async function core(rootItemPath, options = {}, returnType = {}) {
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
(directoryItem) => processItem((0, import_node_path7.join)(itemPath, directoryItem))
(directoryItem) => processItem((0, import_node_path6.join)(itemPath, directoryItem))
)
);
}
@@ -85706,16 +85571,14 @@ function getErrorMessage(error2) {
}
async function checkDiskUsage(logger) {
try {
if (process.platform === "darwin" && (process.arch === "arm" || process.arch === "arm64") && !await checkSipEnablement(logger)) {
return void 0;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises4.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE")
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const message = `The Actions runner is running low on disk space (${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = 1024 * 1024 / blockSizeInBytes;
const numBlocksPerGb = 1024 * 1024 * 1024 / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message = `The Actions runner is running low on disk space (${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env["CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */] !== "true") {
logger.warning(message);
} else {
@@ -85724,8 +85587,8 @@ async function checkDiskUsage(logger) {
core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes
};
} catch (error2) {
logger.warning(
@@ -85751,34 +85614,6 @@ function checkActionVersion(version, githubVersion) {
function cloneObject(obj) {
return JSON.parse(JSON.stringify(obj));
}
async function checkSipEnablement(logger) {
if (process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] !== void 0 && ["true", "false"].includes(process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */])) {
return process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] === "true";
}
try {
const sipStatusOutput = await exec.getExecOutput("csrutil status");
if (sipStatusOutput.exitCode === 0) {
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: enabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
return true;
}
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: disabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
return false;
}
}
return void 0;
} catch (e) {
logger.warning(
`Failed to determine if System Integrity Protection was enabled: ${e}`
);
return void 0;
}
}
async function cleanUpGlob(glob, name, logger) {
logger.debug(`Cleaning up ${name}.`);
try {
@@ -87032,7 +86867,7 @@ function getCliConfigCategoryIfExists(cliError) {
}
function isUnsupportedPlatform() {
return !SUPPORTED_PLATFORMS.some(
([platform2, arch2]) => platform2 === process.platform && arch2 === process.arch
([platform, arch2]) => platform === process.platform && arch2 === process.arch
);
}
function getUnsupportedPlatformError(cliError) {
@@ -87462,17 +87297,17 @@ function getCodeQLBundleExtension(compressionMethod) {
}
function getCodeQLBundleName(compressionMethod) {
const extension = getCodeQLBundleExtension(compressionMethod);
let platform2;
let platform;
if (process.platform === "win32") {
platform2 = "win64";
platform = "win64";
} else if (process.platform === "linux") {
platform2 = "linux64";
platform = "linux64";
} else if (process.platform === "darwin") {
platform2 = "osx64";
platform = "osx64";
} else {
return `codeql-bundle${extension}`;
}
return `codeql-bundle-${platform2}${extension}`;
return `codeql-bundle-${platform}${extension}`;
}
function getCodeQLActionRepository(logger) {
if (isRunningLocalAction()) {
@@ -87506,12 +87341,12 @@ async function getCodeQLBundleDownloadURL(tagName, apiDetails, compressionMethod
}
const [repositoryOwner, repositoryName] = repository.split("/");
try {
const release3 = await getApiClient().rest.repos.getReleaseByTag({
const release2 = await getApiClient().rest.repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: tagName
});
for (const asset of release3.data.assets) {
for (const asset of release2.data.assets) {
if (asset.name === codeQLBundleName) {
logger.info(
`Found CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} with URL ${asset.url}.`
@@ -87951,14 +87786,14 @@ async function getNightlyToolsUrl(logger) {
zstdAvailability.available
) ? "zstd" : "gzip";
try {
const release3 = await getApiClient().rest.repos.listReleases({
const release2 = await getApiClient().rest.repos.listReleases({
owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER,
repo: CODEQL_NIGHTLIES_REPOSITORY_NAME,
per_page: 1,
page: 1,
prerelease: true
});
const latestRelease = release3.data[0];
const latestRelease = release2.data[0];
if (!latestRelease) {
throw new Error("Could not find the latest nightly release.");
}
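
The release-lookup hunks just above only rename the bundler-generated release3 binding, but they show the two Octokit calls involved: fetching a CodeQL bundle release by tag and taking the most recent release from the nightlies repository. A simplified sketch of both lookups follows; the client construction and the owner/repo strings are placeholders, not the action's actual configuration:

```js
// Simplified sketch of the two release lookups above. The client, owner, and repo
// values here are placeholders; the action derives them from its own configuration.
const { Octokit } = require("@octokit/rest");

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Find the download URL of a named bundle asset in the release tagged `tagName`.
async function findBundleAssetUrl(owner, repo, tagName, bundleName) {
  const release = await octokit.rest.repos.getReleaseByTag({ owner, repo, tag: tagName });
  const asset = release.data.assets.find((a) => a.name === bundleName);
  return asset?.url;
}

// Fetch the most recent release of a repository. Releases are returned newest
// first, so per_page: 1 with page: 1 yields only the latest one — which is what
// the bundled code relies on for the nightlies repository.
async function latestNightlyRelease(owner, repo) {
  const releases = await octokit.rest.repos.listReleases({ owner, repo, per_page: 1, page: 1 });
  const latest = releases.data[0];
  if (!latest) {
    throw new Error("Could not find the latest nightly release.");
  }
  return latest;
}
```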

View File

@@ -26498,7 +26498,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",

View File

@@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({
var require_mock_client = __commonJS({
"node_modules/undici/lib/mock/mock-client.js"(exports2, module2) {
"use strict";
var { promisify: promisify2 } = require("util");
var { promisify } = require("util");
var Client = require_client();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify2(this[kOriginalClose])();
await promisify(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({
var require_mock_pool = __commonJS({
"node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) {
"use strict";
var { promisify: promisify2 } = require("util");
var { promisify } = require("util");
var Pool = require_pool();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify2(this[kOriginalClose])();
await promisify(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -18148,7 +18148,7 @@ var require_summary = __commonJS({
exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
var os_1 = require("os");
var fs_1 = require("fs");
var { access: access2, appendFile, writeFile } = fs_1.promises;
var { access, appendFile, writeFile } = fs_1.promises;
exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
var Summary = class {
@@ -18171,7 +18171,7 @@ var require_summary = __commonJS({
throw new Error(`Unable to find environment variable for $${exports2.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access2(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
} catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
@@ -20153,8 +20153,8 @@ var require_semver = __commonJS({
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release3, identifier, identifierBase) {
if (release3.startsWith("pre")) {
inc(release2, identifier, identifierBase) {
if (release2.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
@@ -20165,7 +20165,7 @@ var require_semver = __commonJS({
}
}
}
switch (release3) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -20256,7 +20256,7 @@ var require_semver = __commonJS({
break;
}
default:
throw new Error(`invalid increment argument: ${release3}`);
throw new Error(`invalid increment argument: ${release2}`);
}
this.raw = this.format();
if (this.build.length) {
@@ -20322,7 +20322,7 @@ var require_inc = __commonJS({
"node_modules/semver/functions/inc.js"(exports2, module2) {
"use strict";
var SemVer = require_semver();
var inc = (version, release3, options, identifier, identifierBase) => {
var inc = (version, release2, options, identifier, identifierBase) => {
if (typeof options === "string") {
identifierBase = identifier;
identifier = options;
@@ -20332,7 +20332,7 @@ var require_inc = __commonJS({
return new SemVer(
version instanceof SemVer ? version.version : version,
options
).inc(release3, identifier, identifierBase).version;
).inc(release2, identifier, identifierBase).version;
} catch (er) {
return null;
}
@@ -45034,7 +45034,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -49973,8 +49972,8 @@ var require_semver3 = __commonJS({
}
} while (++i2);
};
SemVer.prototype.inc = function(release3, identifier) {
switch (release3) {
SemVer.prototype.inc = function(release2, identifier) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -50050,20 +50049,20 @@ var require_semver3 = __commonJS({
}
break;
default:
throw new Error("invalid increment argument: " + release3);
throw new Error("invalid increment argument: " + release2);
}
this.format();
this.raw = this.version;
return this;
};
exports2.inc = inc;
function inc(version, release3, loose, identifier) {
function inc(version, release2, loose, identifier) {
if (typeof loose === "string") {
identifier = loose;
loose = void 0;
}
try {
return new SemVer(version, loose).inc(release3, identifier).version;
return new SemVer(version, loose).inc(release2, identifier).version;
} catch (er) {
return null;
}
@@ -73545,7 +73544,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
};
var access2 = {
var access = {
parameterPath: ["options", "access"],
mapper: {
serializedName: "x-ms-blob-public-access",
@@ -75353,7 +75352,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
requestId,
accept1,
metadata,
access2,
access,
defaultEncryptionScope,
preventEncryptionScopeOverride
],
@@ -75500,7 +75499,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
accept,
version,
requestId,
access2,
access,
leaseId,
ifModifiedSince,
ifUnmodifiedSince
@@ -84346,7 +84345,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
* @param containerAcl - Array of elements each having a unique Id and details of the access policy.
* @param options - Options to Container Set Access Policy operation.
*/
async setAccessPolicy(access3, containerAcl2, options = {}) {
async setAccessPolicy(access2, containerAcl2, options = {}) {
options.conditions = options.conditions || {};
return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
const acl = [];
@@ -84362,7 +84361,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
return assertResponse(await this.containerContext.setAccessPolicy({
abortSignal: options.abortSignal,
access: access3,
access: access2,
containerAcl: acl,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: options.conditions,
@@ -93303,147 +93302,13 @@ var github = __toESM(require_github());
var io2 = __toESM(require_io());
// src/util.ts
var fsPromises = __toESM(require("fs/promises"));
var core3 = __toESM(require_core());
var exec = __toESM(require_exec());
var io = __toESM(require_io());
// node_modules/check-disk-space/dist/check-disk-space.mjs
var import_node_child_process = require("node:child_process");
var import_promises = require("node:fs/promises");
var import_node_os = require("node:os");
var import_node_path = require("node:path");
var import_node_process = require("node:process");
var import_node_util = require("node:util");
var InvalidPathError = class _InvalidPathError extends Error {
constructor(message) {
super(message);
this.name = "InvalidPathError";
Object.setPrototypeOf(this, _InvalidPathError.prototype);
}
};
var NoMatchError = class _NoMatchError extends Error {
constructor(message) {
super(message);
this.name = "NoMatchError";
Object.setPrototypeOf(this, _NoMatchError.prototype);
}
};
async function isDirectoryExisting(directoryPath, dependencies) {
try {
await dependencies.fsAccess(directoryPath);
return Promise.resolve(true);
} catch (error2) {
return Promise.resolve(false);
}
}
async function getFirstExistingParentPath(directoryPath, dependencies) {
let parentDirectoryPath = directoryPath;
let parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
while (!parentDirectoryFound) {
parentDirectoryPath = dependencies.pathNormalize(parentDirectoryPath + "/..");
parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
}
return parentDirectoryPath;
}
async function hasPowerShell3(dependencies) {
const major = parseInt(dependencies.release.split(".")[0], 10);
if (major <= 6) {
return false;
}
try {
await dependencies.cpExecFile("where", ["powershell"], { windowsHide: true });
return true;
} catch (error2) {
return false;
}
}
function checkDiskSpace(directoryPath, dependencies = {
platform: import_node_process.platform,
release: (0, import_node_os.release)(),
fsAccess: import_promises.access,
pathNormalize: import_node_path.normalize,
pathSep: import_node_path.sep,
cpExecFile: (0, import_node_util.promisify)(import_node_child_process.execFile)
}) {
function mapOutput(stdout, filter, mapping, coefficient) {
const parsed = stdout.split("\n").map((line) => line.trim()).filter((line) => line.length !== 0).slice(1).map((line) => line.split(/\s+(?=[\d/])/));
const filtered = parsed.filter(filter);
if (filtered.length === 0) {
throw new NoMatchError();
}
const diskData = filtered[0];
return {
diskPath: diskData[mapping.diskPath],
free: parseInt(diskData[mapping.free], 10) * coefficient,
size: parseInt(diskData[mapping.size], 10) * coefficient
};
}
async function check(cmd, filter, mapping, coefficient = 1) {
const [file, ...args] = cmd;
if (file === void 0) {
return Promise.reject(new Error("cmd must contain at least one item"));
}
try {
const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
return mapOutput(stdout, filter, mapping, coefficient);
} catch (error2) {
return Promise.reject(error2);
}
}
async function checkWin32(directoryPath2) {
if (directoryPath2.charAt(1) !== ":") {
return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath2}`));
}
const powershellCmd = [
"powershell",
"Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size"
];
const wmicCmd = [
"wmic",
"logicaldisk",
"get",
"size,freespace,caption"
];
const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
return check(cmd, (driveData) => {
const driveLetter = driveData[0];
return directoryPath2.toUpperCase().startsWith(driveLetter.toUpperCase());
}, {
diskPath: 0,
free: 1,
size: 2
});
}
async function checkUnix(directoryPath2) {
if (!dependencies.pathNormalize(directoryPath2).startsWith(dependencies.pathSep)) {
return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath2}`));
}
const pathToCheck = await getFirstExistingParentPath(directoryPath2, dependencies);
return check(
[
"df",
"-Pk",
"--",
pathToCheck
],
() => true,
// We should only get one line, so we did not need to filter
{
diskPath: 5,
free: 3,
size: 1
},
1024
);
}
if (dependencies.platform === "win32") {
return checkWin32(directoryPath);
}
return checkUnix(directoryPath);
}
// node_modules/get-folder-size/index.js
var import_node_path2 = require("node:path");
var import_node_path = require("node:path");
async function getFolderSize(itemPath, options) {
return await core(itemPath, options, { errors: true });
}
@@ -93468,7 +93333,7 @@ async function core(rootItemPath, options = {}, returnType = {}) {
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
(directoryItem) => processItem((0, import_node_path2.join)(itemPath, directoryItem))
(directoryItem) => processItem((0, import_node_path.join)(itemPath, directoryItem))
)
);
}
@@ -96145,16 +96010,14 @@ function getErrorMessage(error2) {
}
async function checkDiskUsage(logger) {
try {
if (process.platform === "darwin" && (process.arch === "arm" || process.arch === "arm64") && !await checkSipEnablement(logger)) {
return void 0;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE")
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const message = `The Actions runner is running low on disk space (${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = 1024 * 1024 / blockSizeInBytes;
const numBlocksPerGb = 1024 * 1024 * 1024 / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message = `The Actions runner is running low on disk space (${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env["CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */] !== "true") {
logger.warning(message);
} else {
@@ -96163,8 +96026,8 @@ async function checkDiskUsage(logger) {
core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes
};
} catch (error2) {
logger.warning(
@@ -96173,34 +96036,6 @@ async function checkDiskUsage(logger) {
return void 0;
}
}
async function checkSipEnablement(logger) {
if (process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] !== void 0 && ["true", "false"].includes(process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */])) {
return process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] === "true";
}
try {
const sipStatusOutput = await exec.getExecOutput("csrutil status");
if (sipStatusOutput.exitCode === 0) {
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: enabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
return true;
}
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: disabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
return false;
}
}
return void 0;
} catch (e) {
logger.warning(
`Failed to determine if System Integrity Protection was enabled: ${e}`
);
return void 0;
}
}
function isDefined(value) {
return value !== void 0 && value !== null;
}
@@ -96486,8 +96321,8 @@ function getCredentials(logger, registrySecrets, registriesCredentials, language
return out;
}
function getProxyPackage() {
const platform2 = process.platform === "win32" ? "win64" : process.platform === "darwin" ? "osx64" : "linux64";
return `${UPDATEJOB_PROXY}-${platform2}.tar.gz`;
const platform = process.platform === "win32" ? "win64" : process.platform === "darwin" ? "osx64" : "linux64";
return `${UPDATEJOB_PROXY}-${platform}.tar.gz`;
}
function getFallbackUrl(proxyPackage) {
return `${UPDATEJOB_PROXY_URL_PREFIX}${proxyPackage}`;

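The checkDiskUsage hunk above drops the bundled check-disk-space helper (which shelled out to df, wmic, or PowerShell) in favour of fs.promises.statfs, available since Node.js 18.15. statfs reports sizes in filesystem blocks rather than bytes, so both the free and total figures are multiplied by the block size. A minimal sketch of that conversion, not the action's own code; the function name is illustrative and only assumes the path exists:

// Sketch of the statfs-based conversion used in the hunk above.
// fs.promises.statfs reports sizes in filesystem blocks, so multiply by `bsize` to get bytes.
import * as fsPromises from "fs/promises";

async function getWorkspaceDiskUsage(workspace: string) {
  const stats = await fsPromises.statfs(workspace);
  const blockSizeInBytes = stats.bsize;
  return {
    // Blocks available to unprivileged processes, converted to bytes.
    numAvailableBytes: stats.bavail * blockSizeInBytes,
    // Total data blocks on the filesystem, converted to bytes.
    numTotalBytes: stats.blocks * blockSizeInBytes,
  };
}
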
1
lib/upload-lib.js generated
View File

@@ -33644,7 +33644,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",

View File

@@ -26498,7 +26498,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",

View File

@@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({
var require_mock_client = __commonJS({
"node_modules/undici/lib/mock/mock-client.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Client = require_client();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({
var require_mock_pool = __commonJS({
"node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Pool = require_pool();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -18148,7 +18148,7 @@ var require_summary = __commonJS({
exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
var os_1 = require("os");
var fs_1 = require("fs");
var { access: access2, appendFile, writeFile } = fs_1.promises;
var { access, appendFile, writeFile } = fs_1.promises;
exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
var Summary = class {
@@ -18171,7 +18171,7 @@ var require_summary = __commonJS({
throw new Error(`Unable to find environment variable for $${exports2.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access2(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
} catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
@@ -28569,13 +28569,13 @@ var require_reusify = __commonJS({
current.next = null;
return current;
}
function release3(obj) {
function release2(obj) {
tail.next = obj;
tail = obj;
}
return {
get,
release: release3
release: release2
};
}
module2.exports = reusify;
@@ -28644,7 +28644,7 @@ var require_queue = __commonJS({
self2.paused = false;
for (var i = 0; i < self2.concurrency; i++) {
_running++;
release3();
release2();
}
}
function idle() {
@@ -28653,7 +28653,7 @@ var require_queue = __commonJS({
function push(value, done) {
var current = cache.get();
current.context = context2;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -28673,7 +28673,7 @@ var require_queue = __commonJS({
function unshift(value, done) {
var current = cache.get();
current.context = context2;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -28690,7 +28690,7 @@ var require_queue = __commonJS({
worker.call(context2, current.value, current.worked);
}
}
function release3(holder) {
function release2(holder) {
if (holder) {
cache.release(holder);
}
@@ -30711,8 +30711,8 @@ var require_semver = __commonJS({
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release3, identifier, identifierBase) {
if (release3.startsWith("pre")) {
inc(release2, identifier, identifierBase) {
if (release2.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
@@ -30723,7 +30723,7 @@ var require_semver = __commonJS({
}
}
}
switch (release3) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -30814,7 +30814,7 @@ var require_semver = __commonJS({
break;
}
default:
throw new Error(`invalid increment argument: ${release3}`);
throw new Error(`invalid increment argument: ${release2}`);
}
this.raw = this.format();
if (this.build.length) {
@@ -30880,7 +30880,7 @@ var require_inc = __commonJS({
"node_modules/semver/functions/inc.js"(exports2, module2) {
"use strict";
var SemVer = require_semver();
var inc = (version, release3, options, identifier, identifierBase) => {
var inc = (version, release2, options, identifier, identifierBase) => {
if (typeof options === "string") {
identifierBase = identifier;
identifier = options;
@@ -30890,7 +30890,7 @@ var require_inc = __commonJS({
return new SemVer(
version instanceof SemVer ? version.version : version,
options
).inc(release3, identifier, identifierBase).version;
).inc(release2, identifier, identifierBase).version;
} catch (er) {
return null;
}
@@ -32347,7 +32347,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -35989,8 +35988,8 @@ var require_semver3 = __commonJS({
}
} while (++i2);
};
SemVer.prototype.inc = function(release3, identifier) {
switch (release3) {
SemVer.prototype.inc = function(release2, identifier) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -36066,20 +36065,20 @@ var require_semver3 = __commonJS({
}
break;
default:
throw new Error("invalid increment argument: " + release3);
throw new Error("invalid increment argument: " + release2);
}
this.format();
this.raw = this.version;
return this;
};
exports2.inc = inc;
function inc(version, release3, loose, identifier) {
function inc(version, release2, loose, identifier) {
if (typeof loose === "string") {
identifier = loose;
loose = void 0;
}
try {
return new SemVer(version, loose).inc(release3, identifier).version;
return new SemVer(version, loose).inc(release2, identifier).version;
} catch (er) {
return null;
}
@@ -59561,7 +59560,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
};
var access2 = {
var access = {
parameterPath: ["options", "access"],
mapper: {
serializedName: "x-ms-blob-public-access",
@@ -61369,7 +61368,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
requestId,
accept1,
metadata,
access2,
access,
defaultEncryptionScope,
preventEncryptionScopeOverride
],
@@ -61516,7 +61515,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
accept,
version,
requestId,
access2,
access,
leaseId,
ifModifiedSince,
ifUnmodifiedSince
@@ -70362,7 +70361,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
* @param containerAcl - Array of elements each having a unique Id and details of the access policy.
* @param options - Options to Container Set Access Policy operation.
*/
async setAccessPolicy(access3, containerAcl2, options = {}) {
async setAccessPolicy(access2, containerAcl2, options = {}) {
options.conditions = options.conditions || {};
return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
const acl = [];
@@ -70378,7 +70377,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
return assertResponse(await this.containerContext.setAccessPolicy({
abortSignal: options.abortSignal,
access: access3,
access: access2,
containerAcl: acl,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: options.conditions,
@@ -84850,160 +84849,26 @@ var github = __toESM(require_github());
var io2 = __toESM(require_io());
// src/util.ts
var fsPromises4 = __toESM(require("fs/promises"));
var path5 = __toESM(require("path"));
var core3 = __toESM(require_core());
var exec = __toESM(require_exec());
var io = __toESM(require_io());
// node_modules/check-disk-space/dist/check-disk-space.mjs
var import_node_child_process = require("node:child_process");
var import_promises = require("node:fs/promises");
var import_node_os = require("node:os");
var import_node_path = require("node:path");
var import_node_process = require("node:process");
var import_node_util = require("node:util");
var InvalidPathError = class _InvalidPathError extends Error {
constructor(message) {
super(message);
this.name = "InvalidPathError";
Object.setPrototypeOf(this, _InvalidPathError.prototype);
}
};
var NoMatchError = class _NoMatchError extends Error {
constructor(message) {
super(message);
this.name = "NoMatchError";
Object.setPrototypeOf(this, _NoMatchError.prototype);
}
};
async function isDirectoryExisting(directoryPath, dependencies) {
try {
await dependencies.fsAccess(directoryPath);
return Promise.resolve(true);
} catch (error2) {
return Promise.resolve(false);
}
}
async function getFirstExistingParentPath(directoryPath, dependencies) {
let parentDirectoryPath = directoryPath;
let parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
while (!parentDirectoryFound) {
parentDirectoryPath = dependencies.pathNormalize(parentDirectoryPath + "/..");
parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
}
return parentDirectoryPath;
}
async function hasPowerShell3(dependencies) {
const major = parseInt(dependencies.release.split(".")[0], 10);
if (major <= 6) {
return false;
}
try {
await dependencies.cpExecFile("where", ["powershell"], { windowsHide: true });
return true;
} catch (error2) {
return false;
}
}
function checkDiskSpace(directoryPath, dependencies = {
platform: import_node_process.platform,
release: (0, import_node_os.release)(),
fsAccess: import_promises.access,
pathNormalize: import_node_path.normalize,
pathSep: import_node_path.sep,
cpExecFile: (0, import_node_util.promisify)(import_node_child_process.execFile)
}) {
function mapOutput(stdout, filter, mapping, coefficient) {
const parsed = stdout.split("\n").map((line) => line.trim()).filter((line) => line.length !== 0).slice(1).map((line) => line.split(/\s+(?=[\d/])/));
const filtered = parsed.filter(filter);
if (filtered.length === 0) {
throw new NoMatchError();
}
const diskData = filtered[0];
return {
diskPath: diskData[mapping.diskPath],
free: parseInt(diskData[mapping.free], 10) * coefficient,
size: parseInt(diskData[mapping.size], 10) * coefficient
};
}
async function check(cmd, filter, mapping, coefficient = 1) {
const [file, ...args] = cmd;
if (file === void 0) {
return Promise.reject(new Error("cmd must contain at least one item"));
}
try {
const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
return mapOutput(stdout, filter, mapping, coefficient);
} catch (error2) {
return Promise.reject(error2);
}
}
async function checkWin32(directoryPath2) {
if (directoryPath2.charAt(1) !== ":") {
return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath2}`));
}
const powershellCmd = [
"powershell",
"Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size"
];
const wmicCmd = [
"wmic",
"logicaldisk",
"get",
"size,freespace,caption"
];
const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
return check(cmd, (driveData) => {
const driveLetter = driveData[0];
return directoryPath2.toUpperCase().startsWith(driveLetter.toUpperCase());
}, {
diskPath: 0,
free: 1,
size: 2
});
}
async function checkUnix(directoryPath2) {
if (!dependencies.pathNormalize(directoryPath2).startsWith(dependencies.pathSep)) {
return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath2}`));
}
const pathToCheck = await getFirstExistingParentPath(directoryPath2, dependencies);
return check(
[
"df",
"-Pk",
"--",
pathToCheck
],
() => true,
// We should only get one line, so we did not need to filter
{
diskPath: 5,
free: 3,
size: 1
},
1024
);
}
if (dependencies.platform === "win32") {
return checkWin32(directoryPath);
}
return checkUnix(directoryPath);
}
// node_modules/del/index.js
var import_promises5 = __toESM(require("node:fs/promises"), 1);
var import_node_path6 = __toESM(require("node:path"), 1);
var import_node_process5 = __toESM(require("node:process"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_process4 = __toESM(require("node:process"), 1);
// node_modules/globby/index.js
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_fs3 = __toESM(require("node:fs"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
// node_modules/globby/node_modules/@sindresorhus/merge-streams/index.js
var import_node_events = require("node:events");
var import_node_stream = require("node:stream");
var import_promises2 = require("node:stream/promises");
var import_promises = require("node:stream/promises");
function mergeStreams(streams) {
if (!Array.isArray(streams)) {
throw new TypeError(`Expected an array, got \`${typeof streams}\`.`);
@@ -85078,7 +84943,7 @@ var onMergedStreamFinished = async (passThroughStream, streams) => {
}
};
var onMergedStreamEnd = async (passThroughStream, { signal }) => {
await (0, import_promises2.finished)(passThroughStream, { signal, cleanup: true });
await (0, import_promises.finished)(passThroughStream, { signal, cleanup: true });
};
var onInputStreamsUnpipe = async (passThroughStream, streams, { signal }) => {
for await (const [unpipedStream] of (0, import_node_events.on)(passThroughStream, "unpipe", { signal })) {
@@ -85128,7 +84993,7 @@ var afterMergedStreamFinished = async (onFinished, stream2) => {
};
var onInputStreamEnd = async ({ passThroughStream, stream: stream2, streams, ended, aborted, controller: { signal } }) => {
try {
await (0, import_promises2.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
await (0, import_promises.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
if (streams.has(stream2)) {
ended.add(stream2);
}
@@ -85182,13 +85047,13 @@ var import_fast_glob2 = __toESM(require_out4(), 1);
// node_modules/path-type/index.js
var import_node_fs = __toESM(require("node:fs"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_promises2 = __toESM(require("node:fs/promises"), 1);
async function isType(fsStatType, statsMethodName, filePath) {
if (typeof filePath !== "string") {
throw new TypeError(`Expected a string, got ${typeof filePath}`);
}
try {
const stats = await import_promises3.default[fsStatType](filePath);
const stats = await import_promises2.default[fsStatType](filePath);
return stats[statsMethodName]();
} catch (error2) {
if (error2.code === "ENOENT") {
@@ -85218,20 +85083,20 @@ var isDirectorySync = isTypeSync.bind(void 0, "statSync", "isDirectory");
var isSymlinkSync = isTypeSync.bind(void 0, "lstatSync", "isSymbolicLink");
// node_modules/unicorn-magic/node.js
var import_node_util2 = require("node:util");
var import_node_child_process2 = require("node:child_process");
var import_node_util = require("node:util");
var import_node_child_process = require("node:child_process");
var import_node_url = require("node:url");
var execFileOriginal = (0, import_node_util2.promisify)(import_node_child_process2.execFile);
var execFileOriginal = (0, import_node_util.promisify)(import_node_child_process.execFile);
function toPath(urlOrPath) {
return urlOrPath instanceof URL ? (0, import_node_url.fileURLToPath)(urlOrPath) : urlOrPath;
}
var TEN_MEGABYTES_IN_BYTES = 10 * 1024 * 1024;
// node_modules/globby/ignore.js
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_process = __toESM(require("node:process"), 1);
var import_node_fs2 = __toESM(require("node:fs"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_node_path = __toESM(require("node:path"), 1);
var import_fast_glob = __toESM(require_out4(), 1);
var import_ignore = __toESM(require_ignore(), 1);
@@ -85259,16 +85124,16 @@ var ignoreFilesGlobOptions = {
dot: true
};
var GITIGNORE_FILES_PATTERN = "**/.gitignore";
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path2.default.posix.join(base, pattern.slice(1)) : import_node_path2.default.posix.join(base, pattern);
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path.default.posix.join(base, pattern.slice(1)) : import_node_path.default.posix.join(base, pattern);
var parseIgnoreFile = (file, cwd) => {
const base = slash(import_node_path2.default.relative(cwd, import_node_path2.default.dirname(file.filePath)));
const base = slash(import_node_path.default.relative(cwd, import_node_path.default.dirname(file.filePath)));
return file.content.split(/\r?\n/).filter((line) => line && !line.startsWith("#")).map((pattern) => applyBaseToPattern(pattern, base));
};
var toRelativePath = (fileOrDirectory, cwd) => {
cwd = slash(cwd);
if (import_node_path2.default.isAbsolute(fileOrDirectory)) {
if (import_node_path.default.isAbsolute(fileOrDirectory)) {
if (slash(fileOrDirectory).startsWith(cwd)) {
return import_node_path2.default.relative(cwd, fileOrDirectory);
return import_node_path.default.relative(cwd, fileOrDirectory);
}
throw new Error(`Path ${fileOrDirectory} is not in cwd ${cwd}`);
}
@@ -85284,7 +85149,7 @@ var getIsIgnoredPredicate = (files, cwd) => {
};
};
var normalizeOptions = (options = {}) => ({
cwd: toPath(options.cwd) ?? import_node_process2.default.cwd(),
cwd: toPath(options.cwd) ?? import_node_process.default.cwd(),
suppressErrors: Boolean(options.suppressErrors),
deep: typeof options.deep === "number" ? options.deep : Number.POSITIVE_INFINITY,
ignore: [...options.ignore ?? [], ...defaultIgnoredDirectories]
@@ -85301,7 +85166,7 @@ var isIgnoredByIgnoreFiles = async (patterns, options) => {
const files = await Promise.all(
paths.map(async (filePath) => ({
filePath,
content: await import_promises4.default.readFile(filePath, "utf8")
content: await import_promises3.default.readFile(filePath, "utf8")
}))
);
return getIsIgnoredPredicate(files, cwd);
@@ -85330,14 +85195,14 @@ var assertPatternsInput = (patterns) => {
};
var normalizePathForDirectoryGlob = (filePath, cwd) => {
const path16 = isNegativePattern(filePath) ? filePath.slice(1) : filePath;
return import_node_path3.default.isAbsolute(path16) ? path16 : import_node_path3.default.join(cwd, path16);
return import_node_path2.default.isAbsolute(path16) ? path16 : import_node_path2.default.join(cwd, path16);
};
var getDirectoryGlob = ({ directoryPath, files, extensions }) => {
const extensionGlob = extensions?.length > 0 ? `.${extensions.length > 1 ? `{${extensions.join(",")}}` : extensions[0]}` : "";
return files ? files.map((file) => import_node_path3.default.posix.join(directoryPath, `**/${import_node_path3.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path3.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
return files ? files.map((file) => import_node_path2.default.posix.join(directoryPath, `**/${import_node_path2.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path2.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
};
var directoryToGlob = async (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => {
@@ -85347,7 +85212,7 @@ var directoryToGlob = async (directoryPaths, {
return globs.flat();
};
var directoryToGlobSync = (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => directoryPaths.flatMap((directoryPath) => isDirectorySync(normalizePathForDirectoryGlob(directoryPath, cwd)) ? getDirectoryGlob({ directoryPath, files, extensions }) : directoryPath);
@@ -85405,7 +85270,7 @@ var getFilterSync = (options) => {
var createFilterFunction = (isIgnored) => {
const seen = /* @__PURE__ */ new Set();
return (fastGlobResult) => {
const pathKey = import_node_path3.default.normalize(fastGlobResult.path ?? fastGlobResult);
const pathKey = import_node_path2.default.normalize(fastGlobResult.path ?? fastGlobResult);
if (seen.has(pathKey) || isIgnored && isIgnored(pathKey)) {
return false;
}
@@ -85516,12 +85381,12 @@ var { convertPathToPattern } = import_fast_glob2.default;
var import_is_glob = __toESM(require_is_glob(), 1);
// node_modules/is-path-cwd/index.js
var import_node_process4 = __toESM(require("node:process"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
function isPathCwd(path_) {
let cwd = import_node_process4.default.cwd();
path_ = import_node_path4.default.resolve(path_);
if (import_node_process4.default.platform === "win32") {
let cwd = import_node_process3.default.cwd();
path_ = import_node_path3.default.resolve(path_);
if (import_node_process3.default.platform === "win32") {
cwd = cwd.toLowerCase();
path_ = path_.toLowerCase();
}
@@ -85529,11 +85394,11 @@ function isPathCwd(path_) {
}
// node_modules/del/node_modules/is-path-inside/index.js
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
function isPathInside(childPath, parentPath) {
const relation = import_node_path5.default.relative(parentPath, childPath);
const relation = import_node_path4.default.relative(parentPath, childPath);
return Boolean(
relation && relation !== ".." && !relation.startsWith(`..${import_node_path5.default.sep}`) && relation !== import_node_path5.default.resolve(childPath)
relation && relation !== ".." && !relation.startsWith(`..${import_node_path4.default.sep}`) && relation !== import_node_path4.default.resolve(childPath)
);
}
@@ -85672,14 +85537,14 @@ function safeCheck(file, cwd) {
function normalizePatterns(patterns) {
patterns = Array.isArray(patterns) ? patterns : [patterns];
patterns = patterns.map((pattern) => {
if (import_node_process5.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
if (import_node_process4.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
return slash(pattern);
}
return pattern;
});
return patterns;
}
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5.default.cwd(), onProgress = () => {
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process4.default.cwd(), onProgress = () => {
}, ...options } = {}) {
options = {
expandDirectories: false,
@@ -85700,12 +85565,12 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
let deletedCount = 0;
const mapper = async (file) => {
file = import_node_path6.default.resolve(cwd, file);
file = import_node_path5.default.resolve(cwd, file);
if (!force) {
safeCheck(file, cwd);
}
if (!dryRun) {
await import_promises5.default.rm(file, { recursive: true, force: true });
await import_promises4.default.rm(file, { recursive: true, force: true });
}
deletedCount += 1;
onProgress({
@@ -85722,7 +85587,7 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
// node_modules/get-folder-size/index.js
var import_node_path7 = require("node:path");
var import_node_path6 = require("node:path");
async function getFolderSize(itemPath, options) {
return await core(itemPath, options, { errors: true });
}
@@ -85747,7 +85612,7 @@ async function core(rootItemPath, options = {}, returnType = {}) {
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
(directoryItem) => processItem((0, import_node_path7.join)(itemPath, directoryItem))
(directoryItem) => processItem((0, import_node_path6.join)(itemPath, directoryItem))
)
);
}
@@ -88526,16 +88391,14 @@ function getErrorMessage(error2) {
}
async function checkDiskUsage(logger) {
try {
if (process.platform === "darwin" && (process.arch === "arm" || process.arch === "arm64") && !await checkSipEnablement(logger)) {
return void 0;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises4.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE")
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const message = `The Actions runner is running low on disk space (${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = 1024 * 1024 / blockSizeInBytes;
const numBlocksPerGb = 1024 * 1024 * 1024 / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message = `The Actions runner is running low on disk space (${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env["CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */] !== "true") {
logger.warning(message);
} else {
@@ -88544,8 +88407,8 @@ async function checkDiskUsage(logger) {
core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes
};
} catch (error2) {
logger.warning(
@@ -88579,34 +88442,6 @@ function satisfiesGHESVersion(ghesVersion, range, defaultIfInvalid) {
function cloneObject(obj) {
return JSON.parse(JSON.stringify(obj));
}
async function checkSipEnablement(logger) {
if (process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] !== void 0 && ["true", "false"].includes(process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */])) {
return process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] === "true";
}
try {
const sipStatusOutput = await exec.getExecOutput("csrutil status");
if (sipStatusOutput.exitCode === 0) {
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: enabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
return true;
}
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: disabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
return false;
}
}
return void 0;
} catch (e) {
logger.warning(
`Failed to determine if System Integrity Protection was enabled: ${e}`
);
return void 0;
}
}
async function cleanUpGlob(glob, name, logger) {
logger.debug(`Cleaning up ${name}.`);
try {
@@ -90280,7 +90115,7 @@ function getCliConfigCategoryIfExists(cliError) {
}
function isUnsupportedPlatform() {
return !SUPPORTED_PLATFORMS.some(
([platform2, arch2]) => platform2 === process.platform && arch2 === process.arch
([platform, arch2]) => platform === process.platform && arch2 === process.arch
);
}
function getUnsupportedPlatformError(cliError) {
@@ -90696,17 +90531,17 @@ function getCodeQLBundleExtension(compressionMethod) {
}
function getCodeQLBundleName(compressionMethod) {
const extension = getCodeQLBundleExtension(compressionMethod);
let platform2;
let platform;
if (process.platform === "win32") {
platform2 = "win64";
platform = "win64";
} else if (process.platform === "linux") {
platform2 = "linux64";
platform = "linux64";
} else if (process.platform === "darwin") {
platform2 = "osx64";
platform = "osx64";
} else {
return `codeql-bundle${extension}`;
}
return `codeql-bundle-${platform2}${extension}`;
return `codeql-bundle-${platform}${extension}`;
}
function getCodeQLActionRepository(logger) {
if (isRunningLocalAction()) {
@@ -90740,12 +90575,12 @@ async function getCodeQLBundleDownloadURL(tagName, apiDetails, compressionMethod
}
const [repositoryOwner, repositoryName] = repository.split("/");
try {
const release3 = await getApiClient().rest.repos.getReleaseByTag({
const release2 = await getApiClient().rest.repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: tagName
});
for (const asset of release3.data.assets) {
for (const asset of release2.data.assets) {
if (asset.name === codeQLBundleName) {
logger.info(
`Found CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} with URL ${asset.url}.`
@@ -91185,14 +91020,14 @@ async function getNightlyToolsUrl(logger) {
zstdAvailability.available
) ? "zstd" : "gzip";
try {
const release3 = await getApiClient().rest.repos.listReleases({
const release2 = await getApiClient().rest.repos.listReleases({
owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER,
repo: CODEQL_NIGHTLIES_REPOSITORY_NAME,
per_page: 1,
page: 1,
prerelease: true
});
const latestRelease = release3.data[0];
const latestRelease = release2.data[0];
if (!latestRelease) {
throw new Error("Could not find the latest nightly release.");
}

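The same statfs-based check appears in each generated bundle above. The low-space warning keeps its 2 GB threshold, but the threshold is now expressed in filesystem blocks so it can be compared directly against bavail, and the message converts blocks back to megabytes for display. A hedged sketch of just that threshold logic; warnIfLowOnSpace is an illustrative name, not part of the action:

// Sketch of the 2 GB low-disk-space threshold from the hunks above, expressed in blocks.
import * as fsPromises from "fs/promises";

async function warnIfLowOnSpace(workspace: string): Promise<string | undefined> {
  const stats = await fsPromises.statfs(workspace);
  const numBlocksPerMb = (1024 * 1024) / stats.bsize;
  const numBlocksPerGb = (1024 * 1024 * 1024) / stats.bsize;
  if (stats.bavail < 2 * numBlocksPerGb) {
    // Mirrors the bundled code: report available space in MB to 4 significant figures.
    return `The Actions runner is running low on disk space (${(stats.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
  }
  return undefined;
}
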
8
package-lock.json generated
View File

@@ -23,7 +23,6 @@
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
"archiver": "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
"del": "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -4255,13 +4254,6 @@
"url": "https://github.com/chalk/chalk?sponsor=1"
}
},
"node_modules/check-disk-space": {
"version": "3.4.0",
"license": "MIT",
"engines": {
"node": ">=16"
}
},
"node_modules/chownr": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",

View File

@@ -38,7 +38,6 @@
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
"archiver": "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
"del": "^8.0.0",
"fast-deep-equal": "^3.1.3",

View File

@@ -1,11 +1,11 @@
import * as fs from "fs";
import * as fsPromises from "fs/promises";
import * as os from "os";
import * as path from "path";
import * as core from "@actions/core";
import * as exec from "@actions/exec/lib/exec";
import * as io from "@actions/io";
import checkDiskSpace from "check-disk-space";
import * as del from "del";
import getFolderSize from "get-folder-size";
import * as yaml from "js-yaml";
@@ -1099,24 +1099,17 @@ export async function checkDiskUsage(
logger: Logger,
): Promise<DiskUsage | undefined> {
try {
// We avoid running the `df` binary under the hood for macOS ARM runners with SIP disabled.
if (
process.platform === "darwin" &&
(process.arch === "arm" || process.arch === "arm64") &&
!(await checkSipEnablement(logger))
) {
return undefined;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE"),
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = (1024 * 1024) / blockSizeInBytes;
const numBlocksPerGb = (1024 * 1024 * 1024) / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message =
"The Actions runner is running low on disk space " +
`(${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
`(${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env[EnvVar.HAS_WARNED_ABOUT_DISK_SPACE] !== "true") {
logger.warning(message);
} else {
@@ -1125,8 +1118,8 @@ export async function checkDiskUsage(
core.exportVariable(EnvVar.HAS_WARNED_ABOUT_DISK_SPACE, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size,
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes,
};
} catch (error) {
logger.warning(