Merge pull request #3247 from github/henrymercer/disk-usage-node-api

Check disk usage using Node.js API
This commit is contained in:
Henry Mercer
2025-10-28 15:24:12 +00:00
committed by GitHub
16 changed files with 595 additions and 1926 deletions

View File

@@ -26506,7 +26506,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",

349
lib/analyze-action.js generated
View File

@@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({
var require_mock_client = __commonJS({
"node_modules/undici/lib/mock/mock-client.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Client = require_client();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({
var require_mock_pool = __commonJS({
"node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Pool = require_pool();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -18148,7 +18148,7 @@ var require_summary = __commonJS({
exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
var os_1 = require("os");
var fs_1 = require("fs");
var { access: access2, appendFile, writeFile } = fs_1.promises;
var { access, appendFile, writeFile } = fs_1.promises;
exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
var Summary = class {
@@ -18171,7 +18171,7 @@ var require_summary = __commonJS({
throw new Error(`Unable to find environment variable for $${exports2.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access2(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
} catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
@@ -28577,13 +28577,13 @@ var require_reusify = __commonJS({
current.next = null;
return current;
}
function release3(obj) {
function release2(obj) {
tail.next = obj;
tail = obj;
}
return {
get,
release: release3
release: release2
};
}
module2.exports = reusify;
@@ -28652,7 +28652,7 @@ var require_queue = __commonJS({
self2.paused = false;
for (var i = 0; i < self2.concurrency; i++) {
_running++;
release3();
release2();
}
}
function idle() {
@@ -28661,7 +28661,7 @@ var require_queue = __commonJS({
function push(value, done) {
var current = cache.get();
current.context = context2;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -28681,7 +28681,7 @@ var require_queue = __commonJS({
function unshift(value, done) {
var current = cache.get();
current.context = context2;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -28698,7 +28698,7 @@ var require_queue = __commonJS({
worker.call(context2, current.value, current.worked);
}
}
function release3(holder) {
function release2(holder) {
if (holder) {
cache.release(holder);
}
@@ -30719,8 +30719,8 @@ var require_semver = __commonJS({
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release3, identifier, identifierBase) {
if (release3.startsWith("pre")) {
inc(release2, identifier, identifierBase) {
if (release2.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
@@ -30731,7 +30731,7 @@ var require_semver = __commonJS({
}
}
}
switch (release3) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -30822,7 +30822,7 @@ var require_semver = __commonJS({
break;
}
default:
throw new Error(`invalid increment argument: ${release3}`);
throw new Error(`invalid increment argument: ${release2}`);
}
this.raw = this.format();
if (this.build.length) {
@@ -30888,7 +30888,7 @@ var require_inc = __commonJS({
"node_modules/semver/functions/inc.js"(exports2, module2) {
"use strict";
var SemVer = require_semver();
var inc = (version, release3, options, identifier, identifierBase) => {
var inc = (version, release2, options, identifier, identifierBase) => {
if (typeof options === "string") {
identifierBase = identifier;
identifier = options;
@@ -30898,7 +30898,7 @@ var require_inc = __commonJS({
return new SemVer(
version instanceof SemVer ? version.version : version,
options
).inc(release3, identifier, identifierBase).version;
).inc(release2, identifier, identifierBase).version;
} catch (er) {
return null;
}
@@ -32355,7 +32355,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -37294,8 +37293,8 @@ var require_semver3 = __commonJS({
}
} while (++i2);
};
SemVer.prototype.inc = function(release3, identifier) {
switch (release3) {
SemVer.prototype.inc = function(release2, identifier) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -37371,20 +37370,20 @@ var require_semver3 = __commonJS({
}
break;
default:
throw new Error("invalid increment argument: " + release3);
throw new Error("invalid increment argument: " + release2);
}
this.format();
this.raw = this.version;
return this;
};
exports2.inc = inc;
function inc(version, release3, loose, identifier) {
function inc(version, release2, loose, identifier) {
if (typeof loose === "string") {
identifier = loose;
loose = void 0;
}
try {
return new SemVer(version, loose).inc(release3, identifier).version;
return new SemVer(version, loose).inc(release2, identifier).version;
} catch (er) {
return null;
}
@@ -60866,7 +60865,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
};
var access2 = {
var access = {
parameterPath: ["options", "access"],
mapper: {
serializedName: "x-ms-blob-public-access",
@@ -62674,7 +62673,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
requestId,
accept1,
metadata,
access2,
access,
defaultEncryptionScope,
preventEncryptionScopeOverride
],
@@ -62821,7 +62820,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
accept,
version,
requestId,
access2,
access,
leaseId,
ifModifiedSince,
ifUnmodifiedSince
@@ -71667,7 +71666,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
* @param containerAcl - Array of elements each having a unique Id and details of the access policy.
* @param options - Options to Container Set Access Policy operation.
*/
async setAccessPolicy(access3, containerAcl2, options = {}) {
async setAccessPolicy(access2, containerAcl2, options = {}) {
options.conditions = options.conditions || {};
return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
const acl = [];
@@ -71683,7 +71682,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
return assertResponse(await this.containerContext.setAccessPolicy({
abortSignal: options.abortSignal,
access: access3,
access: access2,
containerAcl: acl,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: options.conditions,
@@ -85964,161 +85963,27 @@ var io2 = __toESM(require_io());
// src/util.ts
var fs4 = __toESM(require("fs"));
var fsPromises4 = __toESM(require("fs/promises"));
var os = __toESM(require("os"));
var path5 = __toESM(require("path"));
var core3 = __toESM(require_core());
var exec = __toESM(require_exec());
var io = __toESM(require_io());
// node_modules/check-disk-space/dist/check-disk-space.mjs
var import_node_child_process = require("node:child_process");
var import_promises = require("node:fs/promises");
var import_node_os = require("node:os");
var import_node_path = require("node:path");
var import_node_process = require("node:process");
var import_node_util = require("node:util");
var InvalidPathError = class _InvalidPathError extends Error {
constructor(message) {
super(message);
this.name = "InvalidPathError";
Object.setPrototypeOf(this, _InvalidPathError.prototype);
}
};
var NoMatchError = class _NoMatchError extends Error {
constructor(message) {
super(message);
this.name = "NoMatchError";
Object.setPrototypeOf(this, _NoMatchError.prototype);
}
};
async function isDirectoryExisting(directoryPath, dependencies) {
try {
await dependencies.fsAccess(directoryPath);
return Promise.resolve(true);
} catch (error2) {
return Promise.resolve(false);
}
}
async function getFirstExistingParentPath(directoryPath, dependencies) {
let parentDirectoryPath = directoryPath;
let parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
while (!parentDirectoryFound) {
parentDirectoryPath = dependencies.pathNormalize(parentDirectoryPath + "/..");
parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
}
return parentDirectoryPath;
}
async function hasPowerShell3(dependencies) {
const major = parseInt(dependencies.release.split(".")[0], 10);
if (major <= 6) {
return false;
}
try {
await dependencies.cpExecFile("where", ["powershell"], { windowsHide: true });
return true;
} catch (error2) {
return false;
}
}
function checkDiskSpace(directoryPath, dependencies = {
platform: import_node_process.platform,
release: (0, import_node_os.release)(),
fsAccess: import_promises.access,
pathNormalize: import_node_path.normalize,
pathSep: import_node_path.sep,
cpExecFile: (0, import_node_util.promisify)(import_node_child_process.execFile)
}) {
function mapOutput(stdout, filter, mapping, coefficient) {
const parsed = stdout.split("\n").map((line) => line.trim()).filter((line) => line.length !== 0).slice(1).map((line) => line.split(/\s+(?=[\d/])/));
const filtered = parsed.filter(filter);
if (filtered.length === 0) {
throw new NoMatchError();
}
const diskData = filtered[0];
return {
diskPath: diskData[mapping.diskPath],
free: parseInt(diskData[mapping.free], 10) * coefficient,
size: parseInt(diskData[mapping.size], 10) * coefficient
};
}
async function check(cmd, filter, mapping, coefficient = 1) {
const [file, ...args] = cmd;
if (file === void 0) {
return Promise.reject(new Error("cmd must contain at least one item"));
}
try {
const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
return mapOutput(stdout, filter, mapping, coefficient);
} catch (error2) {
return Promise.reject(error2);
}
}
async function checkWin32(directoryPath2) {
if (directoryPath2.charAt(1) !== ":") {
return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath2}`));
}
const powershellCmd = [
"powershell",
"Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size"
];
const wmicCmd = [
"wmic",
"logicaldisk",
"get",
"size,freespace,caption"
];
const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
return check(cmd, (driveData) => {
const driveLetter = driveData[0];
return directoryPath2.toUpperCase().startsWith(driveLetter.toUpperCase());
}, {
diskPath: 0,
free: 1,
size: 2
});
}
async function checkUnix(directoryPath2) {
if (!dependencies.pathNormalize(directoryPath2).startsWith(dependencies.pathSep)) {
return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath2}`));
}
const pathToCheck = await getFirstExistingParentPath(directoryPath2, dependencies);
return check(
[
"df",
"-Pk",
"--",
pathToCheck
],
() => true,
// We should only get one line, so we did not need to filter
{
diskPath: 5,
free: 3,
size: 1
},
1024
);
}
if (dependencies.platform === "win32") {
return checkWin32(directoryPath);
}
return checkUnix(directoryPath);
}
// node_modules/del/index.js
var import_promises5 = __toESM(require("node:fs/promises"), 1);
var import_node_path6 = __toESM(require("node:path"), 1);
var import_node_process5 = __toESM(require("node:process"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_process4 = __toESM(require("node:process"), 1);
// node_modules/globby/index.js
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_fs3 = __toESM(require("node:fs"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
// node_modules/globby/node_modules/@sindresorhus/merge-streams/index.js
var import_node_events = require("node:events");
var import_node_stream = require("node:stream");
var import_promises2 = require("node:stream/promises");
var import_promises = require("node:stream/promises");
function mergeStreams(streams) {
if (!Array.isArray(streams)) {
throw new TypeError(`Expected an array, got \`${typeof streams}\`.`);
@@ -86193,7 +86058,7 @@ var onMergedStreamFinished = async (passThroughStream, streams) => {
}
};
var onMergedStreamEnd = async (passThroughStream, { signal }) => {
await (0, import_promises2.finished)(passThroughStream, { signal, cleanup: true });
await (0, import_promises.finished)(passThroughStream, { signal, cleanup: true });
};
var onInputStreamsUnpipe = async (passThroughStream, streams, { signal }) => {
for await (const [unpipedStream] of (0, import_node_events.on)(passThroughStream, "unpipe", { signal })) {
@@ -86243,7 +86108,7 @@ var afterMergedStreamFinished = async (onFinished, stream2) => {
};
var onInputStreamEnd = async ({ passThroughStream, stream: stream2, streams, ended, aborted, controller: { signal } }) => {
try {
await (0, import_promises2.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
await (0, import_promises.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
if (streams.has(stream2)) {
ended.add(stream2);
}
@@ -86297,13 +86162,13 @@ var import_fast_glob2 = __toESM(require_out4(), 1);
// node_modules/path-type/index.js
var import_node_fs = __toESM(require("node:fs"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_promises2 = __toESM(require("node:fs/promises"), 1);
async function isType(fsStatType, statsMethodName, filePath) {
if (typeof filePath !== "string") {
throw new TypeError(`Expected a string, got ${typeof filePath}`);
}
try {
const stats = await import_promises3.default[fsStatType](filePath);
const stats = await import_promises2.default[fsStatType](filePath);
return stats[statsMethodName]();
} catch (error2) {
if (error2.code === "ENOENT") {
@@ -86333,20 +86198,20 @@ var isDirectorySync = isTypeSync.bind(void 0, "statSync", "isDirectory");
var isSymlinkSync = isTypeSync.bind(void 0, "lstatSync", "isSymbolicLink");
// node_modules/unicorn-magic/node.js
var import_node_util2 = require("node:util");
var import_node_child_process2 = require("node:child_process");
var import_node_util = require("node:util");
var import_node_child_process = require("node:child_process");
var import_node_url = require("node:url");
var execFileOriginal = (0, import_node_util2.promisify)(import_node_child_process2.execFile);
var execFileOriginal = (0, import_node_util.promisify)(import_node_child_process.execFile);
function toPath(urlOrPath) {
return urlOrPath instanceof URL ? (0, import_node_url.fileURLToPath)(urlOrPath) : urlOrPath;
}
var TEN_MEGABYTES_IN_BYTES = 10 * 1024 * 1024;
// node_modules/globby/ignore.js
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_process = __toESM(require("node:process"), 1);
var import_node_fs2 = __toESM(require("node:fs"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_node_path = __toESM(require("node:path"), 1);
var import_fast_glob = __toESM(require_out4(), 1);
var import_ignore = __toESM(require_ignore(), 1);
@@ -86374,16 +86239,16 @@ var ignoreFilesGlobOptions = {
dot: true
};
var GITIGNORE_FILES_PATTERN = "**/.gitignore";
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path2.default.posix.join(base, pattern.slice(1)) : import_node_path2.default.posix.join(base, pattern);
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path.default.posix.join(base, pattern.slice(1)) : import_node_path.default.posix.join(base, pattern);
var parseIgnoreFile = (file, cwd) => {
const base = slash(import_node_path2.default.relative(cwd, import_node_path2.default.dirname(file.filePath)));
const base = slash(import_node_path.default.relative(cwd, import_node_path.default.dirname(file.filePath)));
return file.content.split(/\r?\n/).filter((line) => line && !line.startsWith("#")).map((pattern) => applyBaseToPattern(pattern, base));
};
var toRelativePath = (fileOrDirectory, cwd) => {
cwd = slash(cwd);
if (import_node_path2.default.isAbsolute(fileOrDirectory)) {
if (import_node_path.default.isAbsolute(fileOrDirectory)) {
if (slash(fileOrDirectory).startsWith(cwd)) {
return import_node_path2.default.relative(cwd, fileOrDirectory);
return import_node_path.default.relative(cwd, fileOrDirectory);
}
throw new Error(`Path ${fileOrDirectory} is not in cwd ${cwd}`);
}
@@ -86399,7 +86264,7 @@ var getIsIgnoredPredicate = (files, cwd) => {
};
};
var normalizeOptions = (options = {}) => ({
cwd: toPath(options.cwd) ?? import_node_process2.default.cwd(),
cwd: toPath(options.cwd) ?? import_node_process.default.cwd(),
suppressErrors: Boolean(options.suppressErrors),
deep: typeof options.deep === "number" ? options.deep : Number.POSITIVE_INFINITY,
ignore: [...options.ignore ?? [], ...defaultIgnoredDirectories]
@@ -86416,7 +86281,7 @@ var isIgnoredByIgnoreFiles = async (patterns, options) => {
const files = await Promise.all(
paths.map(async (filePath) => ({
filePath,
content: await import_promises4.default.readFile(filePath, "utf8")
content: await import_promises3.default.readFile(filePath, "utf8")
}))
);
return getIsIgnoredPredicate(files, cwd);
@@ -86445,14 +86310,14 @@ var assertPatternsInput = (patterns) => {
};
var normalizePathForDirectoryGlob = (filePath, cwd) => {
const path20 = isNegativePattern(filePath) ? filePath.slice(1) : filePath;
return import_node_path3.default.isAbsolute(path20) ? path20 : import_node_path3.default.join(cwd, path20);
return import_node_path2.default.isAbsolute(path20) ? path20 : import_node_path2.default.join(cwd, path20);
};
var getDirectoryGlob = ({ directoryPath, files, extensions }) => {
const extensionGlob = extensions?.length > 0 ? `.${extensions.length > 1 ? `{${extensions.join(",")}}` : extensions[0]}` : "";
return files ? files.map((file) => import_node_path3.default.posix.join(directoryPath, `**/${import_node_path3.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path3.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
return files ? files.map((file) => import_node_path2.default.posix.join(directoryPath, `**/${import_node_path2.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path2.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
};
var directoryToGlob = async (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => {
@@ -86462,7 +86327,7 @@ var directoryToGlob = async (directoryPaths, {
return globs.flat();
};
var directoryToGlobSync = (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => directoryPaths.flatMap((directoryPath) => isDirectorySync(normalizePathForDirectoryGlob(directoryPath, cwd)) ? getDirectoryGlob({ directoryPath, files, extensions }) : directoryPath);
@@ -86520,7 +86385,7 @@ var getFilterSync = (options) => {
var createFilterFunction = (isIgnored) => {
const seen = /* @__PURE__ */ new Set();
return (fastGlobResult) => {
const pathKey = import_node_path3.default.normalize(fastGlobResult.path ?? fastGlobResult);
const pathKey = import_node_path2.default.normalize(fastGlobResult.path ?? fastGlobResult);
if (seen.has(pathKey) || isIgnored && isIgnored(pathKey)) {
return false;
}
@@ -86631,12 +86496,12 @@ var { convertPathToPattern } = import_fast_glob2.default;
var import_is_glob = __toESM(require_is_glob(), 1);
// node_modules/is-path-cwd/index.js
var import_node_process4 = __toESM(require("node:process"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
function isPathCwd(path_) {
let cwd = import_node_process4.default.cwd();
path_ = import_node_path4.default.resolve(path_);
if (import_node_process4.default.platform === "win32") {
let cwd = import_node_process3.default.cwd();
path_ = import_node_path3.default.resolve(path_);
if (import_node_process3.default.platform === "win32") {
cwd = cwd.toLowerCase();
path_ = path_.toLowerCase();
}
@@ -86644,11 +86509,11 @@ function isPathCwd(path_) {
}
// node_modules/del/node_modules/is-path-inside/index.js
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
function isPathInside(childPath, parentPath) {
const relation = import_node_path5.default.relative(parentPath, childPath);
const relation = import_node_path4.default.relative(parentPath, childPath);
return Boolean(
relation && relation !== ".." && !relation.startsWith(`..${import_node_path5.default.sep}`) && relation !== import_node_path5.default.resolve(childPath)
relation && relation !== ".." && !relation.startsWith(`..${import_node_path4.default.sep}`) && relation !== import_node_path4.default.resolve(childPath)
);
}
@@ -86787,14 +86652,14 @@ function safeCheck(file, cwd) {
function normalizePatterns(patterns) {
patterns = Array.isArray(patterns) ? patterns : [patterns];
patterns = patterns.map((pattern) => {
if (import_node_process5.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
if (import_node_process4.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
return slash(pattern);
}
return pattern;
});
return patterns;
}
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5.default.cwd(), onProgress = () => {
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process4.default.cwd(), onProgress = () => {
}, ...options } = {}) {
options = {
expandDirectories: false,
@@ -86815,12 +86680,12 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
let deletedCount = 0;
const mapper = async (file) => {
file = import_node_path6.default.resolve(cwd, file);
file = import_node_path5.default.resolve(cwd, file);
if (!force) {
safeCheck(file, cwd);
}
if (!dryRun) {
await import_promises5.default.rm(file, { recursive: true, force: true });
await import_promises4.default.rm(file, { recursive: true, force: true });
}
deletedCount += 1;
onProgress({
@@ -86837,7 +86702,7 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
// node_modules/get-folder-size/index.js
var import_node_path7 = require("node:path");
var import_node_path6 = require("node:path");
async function getFolderSize(itemPath, options) {
return await core(itemPath, options, { errors: true });
}
@@ -86862,7 +86727,7 @@ async function core(rootItemPath, options = {}, returnType = {}) {
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
(directoryItem) => processItem((0, import_node_path7.join)(itemPath, directoryItem))
(directoryItem) => processItem((0, import_node_path6.join)(itemPath, directoryItem))
)
);
}
@@ -89506,8 +89371,8 @@ function getToolNames(sarif) {
}
return Object.keys(toolNames);
}
function getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, platform3) {
const fixedAmount = 1024 * (platform3 === "win32" ? 1.5 : 1);
function getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, platform2) {
const fixedAmount = 1024 * (platform2 === "win32" ? 1.5 : 1);
const scaledAmount = getReservedRamScaleFactor() * Math.max(totalMemoryMegaBytes - 8 * 1024, 0);
return fixedAmount + scaledAmount;
}
@@ -89521,7 +89386,7 @@ function getReservedRamScaleFactor() {
}
return envVar / 100;
}
function getMemoryFlagValueForPlatform(userInput, totalMemoryBytes, platform3) {
function getMemoryFlagValueForPlatform(userInput, totalMemoryBytes, platform2) {
let memoryToUseMegaBytes;
if (userInput) {
memoryToUseMegaBytes = Number(userInput);
@@ -89534,7 +89399,7 @@ function getMemoryFlagValueForPlatform(userInput, totalMemoryBytes, platform3) {
const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
const reservedMemoryMegaBytes = getSystemReservedMemoryMegaBytes(
totalMemoryMegaBytes,
platform3
platform2
);
memoryToUseMegaBytes = totalMemoryMegaBytes - reservedMemoryMegaBytes;
}
@@ -89889,16 +89754,14 @@ function getErrorMessage(error2) {
}
async function checkDiskUsage(logger) {
try {
if (process.platform === "darwin" && (process.arch === "arm" || process.arch === "arm64") && !await checkSipEnablement(logger)) {
return void 0;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises4.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE")
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const message = `The Actions runner is running low on disk space (${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = 1024 * 1024 / blockSizeInBytes;
const numBlocksPerGb = 1024 * 1024 * 1024 / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message = `The Actions runner is running low on disk space (${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env["CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */] !== "true") {
logger.warning(message);
} else {
@@ -89907,8 +89770,8 @@ async function checkDiskUsage(logger) {
core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes
};
} catch (error2) {
logger.warning(
@@ -89942,34 +89805,6 @@ function satisfiesGHESVersion(ghesVersion, range, defaultIfInvalid) {
function cloneObject(obj) {
return JSON.parse(JSON.stringify(obj));
}
async function checkSipEnablement(logger) {
if (process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] !== void 0 && ["true", "false"].includes(process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */])) {
return process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] === "true";
}
try {
const sipStatusOutput = await exec.getExecOutput("csrutil status");
if (sipStatusOutput.exitCode === 0) {
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: enabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
return true;
}
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: disabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
return false;
}
}
return void 0;
} catch (e) {
logger.warning(
`Failed to determine if System Integrity Protection was enabled: ${e}`
);
return void 0;
}
}
async function cleanUpGlob(glob2, name, logger) {
logger.debug(`Cleaning up ${name}.`);
try {
@@ -90663,7 +90498,7 @@ function getCliConfigCategoryIfExists(cliError) {
}
function isUnsupportedPlatform() {
return !SUPPORTED_PLATFORMS.some(
([platform3, arch2]) => platform3 === process.platform && arch2 === process.arch
([platform2, arch2]) => platform2 === process.platform && arch2 === process.arch
);
}
function getUnsupportedPlatformError(cliError) {
@@ -92349,17 +92184,17 @@ function getCodeQLBundleExtension(compressionMethod) {
}
function getCodeQLBundleName(compressionMethod) {
const extension = getCodeQLBundleExtension(compressionMethod);
let platform3;
let platform2;
if (process.platform === "win32") {
platform3 = "win64";
platform2 = "win64";
} else if (process.platform === "linux") {
platform3 = "linux64";
platform2 = "linux64";
} else if (process.platform === "darwin") {
platform3 = "osx64";
platform2 = "osx64";
} else {
return `codeql-bundle${extension}`;
}
return `codeql-bundle-${platform3}${extension}`;
return `codeql-bundle-${platform2}${extension}`;
}
function getCodeQLActionRepository(logger) {
if (isRunningLocalAction()) {
@@ -92393,12 +92228,12 @@ async function getCodeQLBundleDownloadURL(tagName, apiDetails, compressionMethod
}
const [repositoryOwner, repositoryName] = repository.split("/");
try {
const release3 = await getApiClient().rest.repos.getReleaseByTag({
const release2 = await getApiClient().rest.repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: tagName
});
for (const asset of release3.data.assets) {
for (const asset of release2.data.assets) {
if (asset.name === codeQLBundleName) {
logger.info(
`Found CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} with URL ${asset.url}.`
@@ -92838,14 +92673,14 @@ async function getNightlyToolsUrl(logger) {
zstdAvailability.available
) ? "zstd" : "gzip";
try {
const release3 = await getApiClient().rest.repos.listReleases({
const release2 = await getApiClient().rest.repos.listReleases({
owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER,
repo: CODEQL_NIGHTLIES_REPOSITORY_NAME,
per_page: 1,
page: 1,
prerelease: true
});
const latestRelease = release3.data[0];
const latestRelease = release2.data[0];
if (!latestRelease) {
throw new Error("Could not find the latest nightly release.");
}

233
lib/autobuild-action.js generated
View File

@@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({
var require_mock_client = __commonJS({
"node_modules/undici/lib/mock/mock-client.js"(exports2, module2) {
"use strict";
var { promisify: promisify2 } = require("util");
var { promisify } = require("util");
var Client = require_client();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify2(this[kOriginalClose])();
await promisify(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({
var require_mock_pool = __commonJS({
"node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) {
"use strict";
var { promisify: promisify2 } = require("util");
var { promisify } = require("util");
var Pool = require_pool();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify2(this[kOriginalClose])();
await promisify(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -18148,7 +18148,7 @@ var require_summary = __commonJS({
exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
var os_1 = require("os");
var fs_1 = require("fs");
var { access: access2, appendFile, writeFile } = fs_1.promises;
var { access, appendFile, writeFile } = fs_1.promises;
exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
var Summary = class {
@@ -18171,7 +18171,7 @@ var require_summary = __commonJS({
throw new Error(`Unable to find environment variable for $${exports2.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access2(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
} catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
@@ -24870,8 +24870,8 @@ var require_semver = __commonJS({
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release3, identifier, identifierBase) {
if (release3.startsWith("pre")) {
inc(release2, identifier, identifierBase) {
if (release2.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
@@ -24882,7 +24882,7 @@ var require_semver = __commonJS({
}
}
}
switch (release3) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -24973,7 +24973,7 @@ var require_semver = __commonJS({
break;
}
default:
throw new Error(`invalid increment argument: ${release3}`);
throw new Error(`invalid increment argument: ${release2}`);
}
this.raw = this.format();
if (this.build.length) {
@@ -25039,7 +25039,7 @@ var require_inc = __commonJS({
"node_modules/semver/functions/inc.js"(exports2, module2) {
"use strict";
var SemVer = require_semver();
var inc = (version, release3, options, identifier, identifierBase) => {
var inc = (version, release2, options, identifier, identifierBase) => {
if (typeof options === "string") {
identifierBase = identifier;
identifier = options;
@@ -25049,7 +25049,7 @@ var require_inc = __commonJS({
return new SemVer(
version instanceof SemVer ? version.version : version,
options
).inc(release3, identifier, identifierBase).version;
).inc(release2, identifier, identifierBase).version;
} catch (er) {
return null;
}
@@ -26506,7 +26506,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -31445,8 +31444,8 @@ var require_semver3 = __commonJS({
}
} while (++i2);
};
SemVer.prototype.inc = function(release3, identifier) {
switch (release3) {
SemVer.prototype.inc = function(release2, identifier) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -31522,20 +31521,20 @@ var require_semver3 = __commonJS({
}
break;
default:
throw new Error("invalid increment argument: " + release3);
throw new Error("invalid increment argument: " + release2);
}
this.format();
this.raw = this.version;
return this;
};
exports2.inc = inc;
function inc(version, release3, loose, identifier) {
function inc(version, release2, loose, identifier) {
if (typeof loose === "string") {
identifier = loose;
loose = void 0;
}
try {
return new SemVer(version, loose).inc(release3, identifier).version;
return new SemVer(version, loose).inc(release2, identifier).version;
} catch (er) {
return null;
}
@@ -55017,7 +55016,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
};
var access2 = {
var access = {
parameterPath: ["options", "access"],
mapper: {
serializedName: "x-ms-blob-public-access",
@@ -56825,7 +56824,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
requestId,
accept1,
metadata,
access2,
access,
defaultEncryptionScope,
preventEncryptionScopeOverride
],
@@ -56972,7 +56971,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
accept,
version,
requestId,
access2,
access,
leaseId,
ifModifiedSince,
ifUnmodifiedSince
@@ -65818,7 +65817,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
* @param containerAcl - Array of elements each having a unique Id and details of the access policy.
* @param options - Options to Container Set Access Policy operation.
*/
async setAccessPolicy(access3, containerAcl2, options = {}) {
async setAccessPolicy(access2, containerAcl2, options = {}) {
options.conditions = options.conditions || {};
return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
const acl = [];
@@ -65834,7 +65833,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
return assertResponse(await this.containerContext.setAccessPolicy({
abortSignal: options.abortSignal,
access: access3,
access: access2,
containerAcl: acl,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: options.conditions,
@@ -76112,148 +76111,14 @@ var github = __toESM(require_github());
var io2 = __toESM(require_io());
// src/util.ts
var fsPromises = __toESM(require("fs/promises"));
var path = __toESM(require("path"));
var core3 = __toESM(require_core());
var exec = __toESM(require_exec());
var io = __toESM(require_io());
// node_modules/check-disk-space/dist/check-disk-space.mjs
var import_node_child_process = require("node:child_process");
var import_promises = require("node:fs/promises");
var import_node_os = require("node:os");
var import_node_path = require("node:path");
var import_node_process = require("node:process");
var import_node_util = require("node:util");
var InvalidPathError = class _InvalidPathError extends Error {
constructor(message) {
super(message);
this.name = "InvalidPathError";
Object.setPrototypeOf(this, _InvalidPathError.prototype);
}
};
var NoMatchError = class _NoMatchError extends Error {
constructor(message) {
super(message);
this.name = "NoMatchError";
Object.setPrototypeOf(this, _NoMatchError.prototype);
}
};
async function isDirectoryExisting(directoryPath, dependencies) {
try {
await dependencies.fsAccess(directoryPath);
return Promise.resolve(true);
} catch (error2) {
return Promise.resolve(false);
}
}
async function getFirstExistingParentPath(directoryPath, dependencies) {
let parentDirectoryPath = directoryPath;
let parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
while (!parentDirectoryFound) {
parentDirectoryPath = dependencies.pathNormalize(parentDirectoryPath + "/..");
parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
}
return parentDirectoryPath;
}
async function hasPowerShell3(dependencies) {
const major = parseInt(dependencies.release.split(".")[0], 10);
if (major <= 6) {
return false;
}
try {
await dependencies.cpExecFile("where", ["powershell"], { windowsHide: true });
return true;
} catch (error2) {
return false;
}
}
function checkDiskSpace(directoryPath, dependencies = {
platform: import_node_process.platform,
release: (0, import_node_os.release)(),
fsAccess: import_promises.access,
pathNormalize: import_node_path.normalize,
pathSep: import_node_path.sep,
cpExecFile: (0, import_node_util.promisify)(import_node_child_process.execFile)
}) {
function mapOutput(stdout, filter, mapping, coefficient) {
const parsed = stdout.split("\n").map((line) => line.trim()).filter((line) => line.length !== 0).slice(1).map((line) => line.split(/\s+(?=[\d/])/));
const filtered = parsed.filter(filter);
if (filtered.length === 0) {
throw new NoMatchError();
}
const diskData = filtered[0];
return {
diskPath: diskData[mapping.diskPath],
free: parseInt(diskData[mapping.free], 10) * coefficient,
size: parseInt(diskData[mapping.size], 10) * coefficient
};
}
async function check(cmd, filter, mapping, coefficient = 1) {
const [file, ...args] = cmd;
if (file === void 0) {
return Promise.reject(new Error("cmd must contain at least one item"));
}
try {
const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
return mapOutput(stdout, filter, mapping, coefficient);
} catch (error2) {
return Promise.reject(error2);
}
}
async function checkWin32(directoryPath2) {
if (directoryPath2.charAt(1) !== ":") {
return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath2}`));
}
const powershellCmd = [
"powershell",
"Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size"
];
const wmicCmd = [
"wmic",
"logicaldisk",
"get",
"size,freespace,caption"
];
const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
return check(cmd, (driveData) => {
const driveLetter = driveData[0];
return directoryPath2.toUpperCase().startsWith(driveLetter.toUpperCase());
}, {
diskPath: 0,
free: 1,
size: 2
});
}
async function checkUnix(directoryPath2) {
if (!dependencies.pathNormalize(directoryPath2).startsWith(dependencies.pathSep)) {
return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath2}`));
}
const pathToCheck = await getFirstExistingParentPath(directoryPath2, dependencies);
return check(
[
"df",
"-Pk",
"--",
pathToCheck
],
() => true,
// We should only get one line, so we did not need to filter
{
diskPath: 5,
free: 3,
size: 1
},
1024
);
}
if (dependencies.platform === "win32") {
return checkWin32(directoryPath);
}
return checkUnix(directoryPath);
}
// node_modules/get-folder-size/index.js
var import_node_path2 = require("node:path");
var import_node_path = require("node:path");
async function getFolderSize(itemPath, options) {
return await core(itemPath, options, { errors: true });
}
@@ -76278,7 +76143,7 @@ async function core(rootItemPath, options = {}, returnType = {}) {
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
(directoryItem) => processItem((0, import_node_path2.join)(itemPath, directoryItem))
(directoryItem) => processItem((0, import_node_path.join)(itemPath, directoryItem))
)
);
}
@@ -79047,16 +78912,14 @@ function getErrorMessage(error2) {
}
async function checkDiskUsage(logger) {
try {
if (process.platform === "darwin" && (process.arch === "arm" || process.arch === "arm64") && !await checkSipEnablement(logger)) {
return void 0;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE")
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const message = `The Actions runner is running low on disk space (${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = 1024 * 1024 / blockSizeInBytes;
const numBlocksPerGb = 1024 * 1024 * 1024 / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message = `The Actions runner is running low on disk space (${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env["CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */] !== "true") {
logger.warning(message);
} else {
@@ -79065,8 +78928,8 @@ async function checkDiskUsage(logger) {
core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes
};
} catch (error2) {
logger.warning(
@@ -79092,34 +78955,6 @@ function checkActionVersion(version, githubVersion) {
function cloneObject(obj) {
return JSON.parse(JSON.stringify(obj));
}
async function checkSipEnablement(logger) {
if (process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] !== void 0 && ["true", "false"].includes(process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */])) {
return process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] === "true";
}
try {
const sipStatusOutput = await exec.getExecOutput("csrutil status");
if (sipStatusOutput.exitCode === 0) {
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: enabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
return true;
}
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: disabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
return false;
}
}
return void 0;
} catch (e) {
logger.warning(
`Failed to determine if System Integrity Protection was enabled: ${e}`
);
return void 0;
}
}
async function asyncFilter(array, predicate) {
const results = await Promise.all(array.map(predicate));
return array.filter((_, index) => results[index]);
@@ -79587,7 +79422,7 @@ function getCliConfigCategoryIfExists(cliError) {
}
function isUnsupportedPlatform() {
return !SUPPORTED_PLATFORMS.some(
([platform2, arch]) => platform2 === process.platform && arch === process.arch
([platform, arch]) => platform === process.platform && arch === process.arch
);
}
function getUnsupportedPlatformError(cliError) {

391
lib/init-action-post.js generated
View File

@@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({
var require_mock_client = __commonJS({
"node_modules/undici/lib/mock/mock-client.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Client = require_client();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({
var require_mock_pool = __commonJS({
"node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Pool = require_pool();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -18148,7 +18148,7 @@ var require_summary = __commonJS({
exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
var os_1 = require("os");
var fs_1 = require("fs");
var { access: access2, appendFile, writeFile } = fs_1.promises;
var { access, appendFile, writeFile } = fs_1.promises;
exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
var Summary = class {
@@ -18171,7 +18171,7 @@ var require_summary = __commonJS({
throw new Error(`Unable to find environment variable for $${exports2.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access2(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
} catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
@@ -28577,13 +28577,13 @@ var require_reusify = __commonJS({
current.next = null;
return current;
}
function release3(obj) {
function release2(obj) {
tail.next = obj;
tail = obj;
}
return {
get,
release: release3
release: release2
};
}
module2.exports = reusify;
@@ -28652,7 +28652,7 @@ var require_queue = __commonJS({
self2.paused = false;
for (var i = 0; i < self2.concurrency; i++) {
_running++;
release3();
release2();
}
}
function idle() {
@@ -28661,7 +28661,7 @@ var require_queue = __commonJS({
function push(value, done) {
var current = cache.get();
current.context = context3;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -28681,7 +28681,7 @@ var require_queue = __commonJS({
function unshift(value, done) {
var current = cache.get();
current.context = context3;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -28698,7 +28698,7 @@ var require_queue = __commonJS({
worker.call(context3, current.value, current.worked);
}
}
function release3(holder) {
function release2(holder) {
if (holder) {
cache.release(holder);
}
@@ -30719,8 +30719,8 @@ var require_semver = __commonJS({
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release3, identifier, identifierBase) {
if (release3.startsWith("pre")) {
inc(release2, identifier, identifierBase) {
if (release2.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
@@ -30731,7 +30731,7 @@ var require_semver = __commonJS({
}
}
}
switch (release3) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -30822,7 +30822,7 @@ var require_semver = __commonJS({
break;
}
default:
throw new Error(`invalid increment argument: ${release3}`);
throw new Error(`invalid increment argument: ${release2}`);
}
this.raw = this.format();
if (this.build.length) {
@@ -30888,7 +30888,7 @@ var require_inc = __commonJS({
"node_modules/semver/functions/inc.js"(exports2, module2) {
"use strict";
var SemVer = require_semver();
var inc = (version, release3, options, identifier, identifierBase) => {
var inc = (version, release2, options, identifier, identifierBase) => {
if (typeof options === "string") {
identifierBase = identifier;
identifier = options;
@@ -30898,7 +30898,7 @@ var require_inc = __commonJS({
return new SemVer(
version instanceof SemVer ? version.version : version,
options
).inc(release3, identifier, identifierBase).version;
).inc(release2, identifier, identifierBase).version;
} catch (er) {
return null;
}
@@ -32355,7 +32355,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -37294,8 +37293,8 @@ var require_semver3 = __commonJS({
}
} while (++i2);
};
SemVer.prototype.inc = function(release3, identifier) {
switch (release3) {
SemVer.prototype.inc = function(release2, identifier) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -37371,20 +37370,20 @@ var require_semver3 = __commonJS({
}
break;
default:
throw new Error("invalid increment argument: " + release3);
throw new Error("invalid increment argument: " + release2);
}
this.format();
this.raw = this.version;
return this;
};
exports2.inc = inc;
function inc(version, release3, loose, identifier) {
function inc(version, release2, loose, identifier) {
if (typeof loose === "string") {
identifier = loose;
loose = void 0;
}
try {
return new SemVer(version, loose).inc(release3, identifier).version;
return new SemVer(version, loose).inc(release2, identifier).version;
} catch (er) {
return null;
}
@@ -60866,7 +60865,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
};
var access2 = {
var access = {
parameterPath: ["options", "access"],
mapper: {
serializedName: "x-ms-blob-public-access",
@@ -62674,7 +62673,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
requestId,
accept1,
metadata,
access2,
access,
defaultEncryptionScope,
preventEncryptionScopeOverride
],
@@ -62821,7 +62820,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
accept,
version,
requestId,
access2,
access,
leaseId,
ifModifiedSince,
ifUnmodifiedSince
@@ -71667,7 +71666,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
* @param containerAcl - Array of elements each having a unique Id and details of the access policy.
* @param options - Options to Container Set Access Policy operation.
*/
async setAccessPolicy(access3, containerAcl2, options = {}) {
async setAccessPolicy(access2, containerAcl2, options = {}) {
options.conditions = options.conditions || {};
return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
const acl = [];
@@ -71683,7 +71682,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
return assertResponse(await this.containerContext.setAccessPolicy({
abortSignal: options.abortSignal,
access: access3,
access: access2,
containerAcl: acl,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: options.conditions,
@@ -87698,7 +87697,7 @@ var require_polyfills = __commonJS({
var constants = require("constants");
var origCwd = process.cwd;
var cwd = null;
var platform2 = process.env.GRACEFUL_FS_PLATFORM || process.platform;
var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform;
process.cwd = function() {
if (!cwd)
cwd = origCwd.call(process);
@@ -87757,7 +87756,7 @@ var require_polyfills = __commonJS({
fs20.lchownSync = function() {
};
}
if (platform2 === "win32") {
if (platform === "win32") {
fs20.rename = typeof fs20.rename !== "function" ? fs20.rename : (function(fs$rename) {
function rename(from, to, cb) {
var start = Date.now();
@@ -91278,8 +91277,8 @@ var require_primordials = __commonJS({
ArrayPrototypeIndexOf(self2, el) {
return self2.indexOf(el);
},
ArrayPrototypeJoin(self2, sep5) {
return self2.join(sep5);
ArrayPrototypeJoin(self2, sep4) {
return self2.join(sep4);
},
ArrayPrototypeMap(self2, fn) {
return self2.map(fn);
@@ -102090,7 +102089,7 @@ var require_commonjs16 = __commonJS({
var TYPEMASK = 1023;
var entToType = (s) => s.isFile() ? IFREG : s.isDirectory() ? IFDIR : s.isSymbolicLink() ? IFLNK : s.isCharacterDevice() ? IFCHR : s.isBlockDevice() ? IFBLK : s.isSocket() ? IFSOCK : s.isFIFO() ? IFIFO : UNKNOWN;
var normalizeCache = /* @__PURE__ */ new Map();
var normalize3 = (s) => {
var normalize2 = (s) => {
const c = normalizeCache.get(s);
if (c)
return c;
@@ -102103,7 +102102,7 @@ var require_commonjs16 = __commonJS({
const c = normalizeNocaseCache.get(s);
if (c)
return c;
const n = normalize3(s.toLowerCase());
const n = normalize2(s.toLowerCase());
normalizeNocaseCache.set(s, n);
return n;
};
@@ -102272,7 +102271,7 @@ var require_commonjs16 = __commonJS({
*/
constructor(name, type2 = UNKNOWN, root, roots, nocase, children, opts) {
this.name = name;
this.#matchName = nocase ? normalizeNocase(name) : normalize3(name);
this.#matchName = nocase ? normalizeNocase(name) : normalize2(name);
this.#type = type2 & TYPEMASK;
this.nocase = nocase;
this.roots = roots;
@@ -102365,7 +102364,7 @@ var require_commonjs16 = __commonJS({
return this.parent || this;
}
const children = this.children();
const name = this.nocase ? normalizeNocase(pathPart) : normalize3(pathPart);
const name = this.nocase ? normalizeNocase(pathPart) : normalize2(pathPart);
for (const p of children) {
if (p.#matchName === name) {
return p;
@@ -102610,7 +102609,7 @@ var require_commonjs16 = __commonJS({
* directly.
*/
isNamed(n) {
return !this.nocase ? this.#matchName === normalize3(n) : this.#matchName === normalizeNocase(n);
return !this.nocase ? this.#matchName === normalize2(n) : this.#matchName === normalizeNocase(n);
}
/**
* Return the Path object corresponding to the target of a symbolic link.
@@ -102749,7 +102748,7 @@ var require_commonjs16 = __commonJS({
#readdirMaybePromoteChild(e, c) {
for (let p = c.provisional; p < c.length; p++) {
const pchild = c[p];
const name = this.nocase ? normalizeNocase(e.name) : normalize3(e.name);
const name = this.nocase ? normalizeNocase(e.name) : normalize2(e.name);
if (name !== pchild.#matchName) {
continue;
}
@@ -103166,7 +103165,7 @@ var require_commonjs16 = __commonJS({
*
* @internal
*/
constructor(cwd = process.cwd(), pathImpl, sep5, { nocase, childrenCacheSize = 16 * 1024, fs: fs20 = defaultFS } = {}) {
constructor(cwd = process.cwd(), pathImpl, sep4, { nocase, childrenCacheSize = 16 * 1024, fs: fs20 = defaultFS } = {}) {
this.#fs = fsFromOption(fs20);
if (cwd instanceof URL || cwd.startsWith("file://")) {
cwd = (0, node_url_1.fileURLToPath)(cwd);
@@ -103177,7 +103176,7 @@ var require_commonjs16 = __commonJS({
this.#resolveCache = new ResolveCache();
this.#resolvePosixCache = new ResolveCache();
this.#children = new ChildrenCache(childrenCacheSize);
const split = cwdPath.substring(this.rootPath.length).split(sep5);
const split = cwdPath.substring(this.rootPath.length).split(sep4);
if (split.length === 1 && !split[0]) {
split.pop();
}
@@ -103800,7 +103799,7 @@ var require_pattern2 = __commonJS({
#isUNC;
#isAbsolute;
#followGlobstar = true;
constructor(patternList, globList, index, platform2) {
constructor(patternList, globList, index, platform) {
if (!isPatternList(patternList)) {
throw new TypeError("empty pattern list");
}
@@ -103817,7 +103816,7 @@ var require_pattern2 = __commonJS({
this.#patternList = patternList;
this.#globList = globList;
this.#index = index;
this.#platform = platform2;
this.#platform = platform;
if (this.#index === 0) {
if (this.isUNC()) {
const [p0, p1, p2, p3, ...prest] = this.#patternList;
@@ -103969,12 +103968,12 @@ var require_ignore2 = __commonJS({
absoluteChildren;
platform;
mmopts;
constructor(ignored, { nobrace, nocase, noext, noglobstar, platform: platform2 = defaultPlatform }) {
constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform }) {
this.relative = [];
this.absolute = [];
this.relativeChildren = [];
this.absoluteChildren = [];
this.platform = platform2;
this.platform = platform;
this.mmopts = {
dot: true,
nobrace,
@@ -103982,7 +103981,7 @@ var require_ignore2 = __commonJS({
noext,
noglobstar,
optimizationLevel: 2,
platform: platform2,
platform,
nocomment: true,
nonegate: true
};
@@ -106194,8 +106193,8 @@ var require_zip_archive_entry = __commonJS({
}
this.name = name;
};
ZipArchiveEntry.prototype.setPlatform = function(platform2) {
this.platform = platform2;
ZipArchiveEntry.prototype.setPlatform = function(platform) {
this.platform = platform;
};
ZipArchiveEntry.prototype.setSize = function(size) {
if (size < 0) {
@@ -113374,13 +113373,13 @@ var require_tmp = __commonJS({
var require_tmp_promise = __commonJS({
"node_modules/tmp-promise/index.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var tmp = require_tmp();
module2.exports.fileSync = tmp.fileSync;
var fileWithOptions = promisify3(
var fileWithOptions = promisify2(
(options, cb) => tmp.file(
options,
(err, path19, fd, cleanup) => err ? cb(err) : cb(void 0, { path: path19, fd, cleanup: promisify3(cleanup) })
(err, path19, fd, cleanup) => err ? cb(err) : cb(void 0, { path: path19, fd, cleanup: promisify2(cleanup) })
)
);
module2.exports.file = async (options) => fileWithOptions(options);
@@ -113393,10 +113392,10 @@ var require_tmp_promise = __commonJS({
}
};
module2.exports.dirSync = tmp.dirSync;
var dirWithOptions = promisify3(
var dirWithOptions = promisify2(
(options, cb) => tmp.dir(
options,
(err, path19, cleanup) => err ? cb(err) : cb(void 0, { path: path19, cleanup: promisify3(cleanup) })
(err, path19, cleanup) => err ? cb(err) : cb(void 0, { path: path19, cleanup: promisify2(cleanup) })
)
);
module2.exports.dir = async (options) => dirWithOptions(options);
@@ -113409,7 +113408,7 @@ var require_tmp_promise = __commonJS({
}
};
module2.exports.tmpNameSync = tmp.tmpNameSync;
module2.exports.tmpName = promisify3(tmp.tmpName);
module2.exports.tmpName = promisify2(tmp.tmpName);
module2.exports.tmpdir = tmp.tmpdir;
module2.exports.setGracefulCleanup = tmp.setGracefulCleanup;
}
@@ -119362,160 +119361,26 @@ var io2 = __toESM(require_io());
// src/util.ts
var fs4 = __toESM(require("fs"));
var fsPromises4 = __toESM(require("fs/promises"));
var path5 = __toESM(require("path"));
var core3 = __toESM(require_core());
var exec = __toESM(require_exec());
var io = __toESM(require_io());
// node_modules/check-disk-space/dist/check-disk-space.mjs
var import_node_child_process = require("node:child_process");
var import_promises = require("node:fs/promises");
var import_node_os = require("node:os");
var import_node_path = require("node:path");
var import_node_process = require("node:process");
var import_node_util = require("node:util");
var InvalidPathError = class _InvalidPathError extends Error {
constructor(message) {
super(message);
this.name = "InvalidPathError";
Object.setPrototypeOf(this, _InvalidPathError.prototype);
}
};
var NoMatchError = class _NoMatchError extends Error {
constructor(message) {
super(message);
this.name = "NoMatchError";
Object.setPrototypeOf(this, _NoMatchError.prototype);
}
};
async function isDirectoryExisting(directoryPath, dependencies) {
try {
await dependencies.fsAccess(directoryPath);
return Promise.resolve(true);
} catch (error2) {
return Promise.resolve(false);
}
}
async function getFirstExistingParentPath(directoryPath, dependencies) {
let parentDirectoryPath = directoryPath;
let parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
while (!parentDirectoryFound) {
parentDirectoryPath = dependencies.pathNormalize(parentDirectoryPath + "/..");
parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
}
return parentDirectoryPath;
}
async function hasPowerShell3(dependencies) {
const major = parseInt(dependencies.release.split(".")[0], 10);
if (major <= 6) {
return false;
}
try {
await dependencies.cpExecFile("where", ["powershell"], { windowsHide: true });
return true;
} catch (error2) {
return false;
}
}
// Compute disk usage for the volume containing `directoryPath`, resolving to
// { diskPath, free, size } with `free`/`size` in bytes. Bundled from the
// `check-disk-space` package; all platform facilities are injectable via
// `dependencies` so the parsing logic can be exercised in isolation.
function checkDiskSpace(directoryPath, dependencies = {
  platform: import_node_process.platform,
  release: (0, import_node_os.release)(),
  fsAccess: import_promises.access,
  pathNormalize: import_node_path.normalize,
  pathSep: import_node_path.sep,
  cpExecFile: (0, import_node_util.promisify)(import_node_child_process.execFile)
}) {
  // Parse tabular command output (df / wmic / PowerShell): trim rows, drop
  // the header row, split each remaining row on whitespace, keep the first
  // row accepted by `filter`, and scale the numeric columns by `coefficient`
  // to convert them to bytes.
  function mapOutput(stdout, filter, mapping, coefficient) {
    const parsed = stdout.split("\n").map((line) => line.trim()).filter((line) => line.length !== 0).slice(1).map((line) => line.split(/\s+(?=[\d/])/));
    const filtered = parsed.filter(filter);
    if (filtered.length === 0) {
      // No output row matched the requested drive/mount.
      throw new NoMatchError();
    }
    const diskData = filtered[0];
    return {
      diskPath: diskData[mapping.diskPath],
      free: parseInt(diskData[mapping.free], 10) * coefficient,
      size: parseInt(diskData[mapping.size], 10) * coefficient
    };
  }
  // Run `cmd` (an argv array) and map its stdout through `mapOutput`.
  // Rejects if the command fails or no output row matches.
  async function check(cmd, filter, mapping, coefficient = 1) {
    const [file, ...args] = cmd;
    if (file === void 0) {
      return Promise.reject(new Error("cmd must contain at least one item"));
    }
    try {
      const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
      return mapOutput(stdout, filter, mapping, coefficient);
    } catch (error2) {
      return Promise.reject(error2);
    }
  }
  // Windows: require a drive-letter path (X:\...). Prefer PowerShell's
  // Get-CimInstance; fall back to `wmic` when PowerShell 3+ is unavailable.
  async function checkWin32(directoryPath2) {
    if (directoryPath2.charAt(1) !== ":") {
      return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath2}`));
    }
    const powershellCmd = [
      "powershell",
      "Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size"
    ];
    const wmicCmd = [
      "wmic",
      "logicaldisk",
      "get",
      "size,freespace,caption"
    ];
    const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
    return check(cmd, (driveData) => {
      // Keep the row whose drive letter prefixes the requested path.
      const driveLetter = driveData[0];
      return directoryPath2.toUpperCase().startsWith(driveLetter.toUpperCase());
    }, {
      // Column indices in the Caption/FreeSpace/Size output; values are
      // already in bytes, so the default coefficient of 1 applies.
      diskPath: 0,
      free: 1,
      size: 2
    });
  }
  // Unix: require an absolute path, walk up to the nearest existing parent
  // (df fails on nonexistent paths), then query `df -Pk` (POSIX format,
  // 1024-byte blocks — hence the coefficient of 1024 to convert to bytes).
  async function checkUnix(directoryPath2) {
    if (!dependencies.pathNormalize(directoryPath2).startsWith(dependencies.pathSep)) {
      return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath2}`));
    }
    const pathToCheck = await getFirstExistingParentPath(directoryPath2, dependencies);
    return check(
      [
        "df",
        "-Pk",
        "--",
        pathToCheck
      ],
      () => true,
      // We should only get one line, so we did not need to filter
      {
        // Column indices in `df -P` output: mount point, available, total.
        diskPath: 5,
        free: 3,
        size: 1
      },
      1024
    );
  }
  if (dependencies.platform === "win32") {
    return checkWin32(directoryPath);
  }
  return checkUnix(directoryPath);
}
// node_modules/del/index.js
var import_promises5 = __toESM(require("node:fs/promises"), 1);
var import_node_path6 = __toESM(require("node:path"), 1);
var import_node_process5 = __toESM(require("node:process"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_process4 = __toESM(require("node:process"), 1);
// node_modules/globby/index.js
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_fs3 = __toESM(require("node:fs"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
// node_modules/globby/node_modules/@sindresorhus/merge-streams/index.js
var import_node_events = require("node:events");
var import_node_stream = require("node:stream");
var import_promises2 = require("node:stream/promises");
var import_promises = require("node:stream/promises");
function mergeStreams(streams) {
if (!Array.isArray(streams)) {
throw new TypeError(`Expected an array, got \`${typeof streams}\`.`);
@@ -119590,7 +119455,7 @@ var onMergedStreamFinished = async (passThroughStream, streams) => {
}
};
var onMergedStreamEnd = async (passThroughStream, { signal }) => {
await (0, import_promises2.finished)(passThroughStream, { signal, cleanup: true });
await (0, import_promises.finished)(passThroughStream, { signal, cleanup: true });
};
var onInputStreamsUnpipe = async (passThroughStream, streams, { signal }) => {
for await (const [unpipedStream] of (0, import_node_events.on)(passThroughStream, "unpipe", { signal })) {
@@ -119640,7 +119505,7 @@ var afterMergedStreamFinished = async (onFinished, stream2) => {
};
var onInputStreamEnd = async ({ passThroughStream, stream: stream2, streams, ended, aborted, controller: { signal } }) => {
try {
await (0, import_promises2.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
await (0, import_promises.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
if (streams.has(stream2)) {
ended.add(stream2);
}
@@ -119694,13 +119559,13 @@ var import_fast_glob2 = __toESM(require_out4(), 1);
// node_modules/path-type/index.js
var import_node_fs = __toESM(require("node:fs"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_promises2 = __toESM(require("node:fs/promises"), 1);
async function isType(fsStatType, statsMethodName, filePath) {
if (typeof filePath !== "string") {
throw new TypeError(`Expected a string, got ${typeof filePath}`);
}
try {
const stats = await import_promises3.default[fsStatType](filePath);
const stats = await import_promises2.default[fsStatType](filePath);
return stats[statsMethodName]();
} catch (error2) {
if (error2.code === "ENOENT") {
@@ -119730,20 +119595,20 @@ var isDirectorySync = isTypeSync.bind(void 0, "statSync", "isDirectory");
var isSymlinkSync = isTypeSync.bind(void 0, "lstatSync", "isSymbolicLink");
// node_modules/unicorn-magic/node.js
var import_node_util2 = require("node:util");
var import_node_child_process2 = require("node:child_process");
var import_node_util = require("node:util");
var import_node_child_process = require("node:child_process");
var import_node_url = require("node:url");
var execFileOriginal = (0, import_node_util2.promisify)(import_node_child_process2.execFile);
var execFileOriginal = (0, import_node_util.promisify)(import_node_child_process.execFile);
function toPath(urlOrPath) {
return urlOrPath instanceof URL ? (0, import_node_url.fileURLToPath)(urlOrPath) : urlOrPath;
}
var TEN_MEGABYTES_IN_BYTES = 10 * 1024 * 1024;
// node_modules/globby/ignore.js
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_process = __toESM(require("node:process"), 1);
var import_node_fs2 = __toESM(require("node:fs"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_node_path = __toESM(require("node:path"), 1);
var import_fast_glob = __toESM(require_out4(), 1);
var import_ignore = __toESM(require_ignore(), 1);
@@ -119771,16 +119636,16 @@ var ignoreFilesGlobOptions = {
dot: true
};
var GITIGNORE_FILES_PATTERN = "**/.gitignore";
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path2.default.posix.join(base, pattern.slice(1)) : import_node_path2.default.posix.join(base, pattern);
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path.default.posix.join(base, pattern.slice(1)) : import_node_path.default.posix.join(base, pattern);
var parseIgnoreFile = (file, cwd) => {
const base = slash(import_node_path2.default.relative(cwd, import_node_path2.default.dirname(file.filePath)));
const base = slash(import_node_path.default.relative(cwd, import_node_path.default.dirname(file.filePath)));
return file.content.split(/\r?\n/).filter((line) => line && !line.startsWith("#")).map((pattern) => applyBaseToPattern(pattern, base));
};
var toRelativePath = (fileOrDirectory, cwd) => {
cwd = slash(cwd);
if (import_node_path2.default.isAbsolute(fileOrDirectory)) {
if (import_node_path.default.isAbsolute(fileOrDirectory)) {
if (slash(fileOrDirectory).startsWith(cwd)) {
return import_node_path2.default.relative(cwd, fileOrDirectory);
return import_node_path.default.relative(cwd, fileOrDirectory);
}
throw new Error(`Path ${fileOrDirectory} is not in cwd ${cwd}`);
}
@@ -119796,7 +119661,7 @@ var getIsIgnoredPredicate = (files, cwd) => {
};
};
var normalizeOptions = (options = {}) => ({
cwd: toPath(options.cwd) ?? import_node_process2.default.cwd(),
cwd: toPath(options.cwd) ?? import_node_process.default.cwd(),
suppressErrors: Boolean(options.suppressErrors),
deep: typeof options.deep === "number" ? options.deep : Number.POSITIVE_INFINITY,
ignore: [...options.ignore ?? [], ...defaultIgnoredDirectories]
@@ -119813,7 +119678,7 @@ var isIgnoredByIgnoreFiles = async (patterns, options) => {
const files = await Promise.all(
paths.map(async (filePath) => ({
filePath,
content: await import_promises4.default.readFile(filePath, "utf8")
content: await import_promises3.default.readFile(filePath, "utf8")
}))
);
return getIsIgnoredPredicate(files, cwd);
@@ -119842,14 +119707,14 @@ var assertPatternsInput = (patterns) => {
};
var normalizePathForDirectoryGlob = (filePath, cwd) => {
const path19 = isNegativePattern(filePath) ? filePath.slice(1) : filePath;
return import_node_path3.default.isAbsolute(path19) ? path19 : import_node_path3.default.join(cwd, path19);
return import_node_path2.default.isAbsolute(path19) ? path19 : import_node_path2.default.join(cwd, path19);
};
var getDirectoryGlob = ({ directoryPath, files, extensions }) => {
const extensionGlob = extensions?.length > 0 ? `.${extensions.length > 1 ? `{${extensions.join(",")}}` : extensions[0]}` : "";
return files ? files.map((file) => import_node_path3.default.posix.join(directoryPath, `**/${import_node_path3.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path3.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
return files ? files.map((file) => import_node_path2.default.posix.join(directoryPath, `**/${import_node_path2.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path2.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
};
var directoryToGlob = async (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => {
@@ -119859,7 +119724,7 @@ var directoryToGlob = async (directoryPaths, {
return globs.flat();
};
var directoryToGlobSync = (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => directoryPaths.flatMap((directoryPath) => isDirectorySync(normalizePathForDirectoryGlob(directoryPath, cwd)) ? getDirectoryGlob({ directoryPath, files, extensions }) : directoryPath);
@@ -119917,7 +119782,7 @@ var getFilterSync = (options) => {
var createFilterFunction = (isIgnored) => {
const seen = /* @__PURE__ */ new Set();
return (fastGlobResult) => {
const pathKey = import_node_path3.default.normalize(fastGlobResult.path ?? fastGlobResult);
const pathKey = import_node_path2.default.normalize(fastGlobResult.path ?? fastGlobResult);
if (seen.has(pathKey) || isIgnored && isIgnored(pathKey)) {
return false;
}
@@ -120028,12 +119893,12 @@ var { convertPathToPattern } = import_fast_glob2.default;
var import_is_glob = __toESM(require_is_glob(), 1);
// node_modules/is-path-cwd/index.js
var import_node_process4 = __toESM(require("node:process"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
function isPathCwd(path_) {
let cwd = import_node_process4.default.cwd();
path_ = import_node_path4.default.resolve(path_);
if (import_node_process4.default.platform === "win32") {
let cwd = import_node_process3.default.cwd();
path_ = import_node_path3.default.resolve(path_);
if (import_node_process3.default.platform === "win32") {
cwd = cwd.toLowerCase();
path_ = path_.toLowerCase();
}
@@ -120041,11 +119906,11 @@ function isPathCwd(path_) {
}
// node_modules/del/node_modules/is-path-inside/index.js
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
function isPathInside(childPath, parentPath) {
const relation = import_node_path5.default.relative(parentPath, childPath);
const relation = import_node_path4.default.relative(parentPath, childPath);
return Boolean(
relation && relation !== ".." && !relation.startsWith(`..${import_node_path5.default.sep}`) && relation !== import_node_path5.default.resolve(childPath)
relation && relation !== ".." && !relation.startsWith(`..${import_node_path4.default.sep}`) && relation !== import_node_path4.default.resolve(childPath)
);
}
@@ -120184,14 +120049,14 @@ function safeCheck(file, cwd) {
function normalizePatterns(patterns) {
patterns = Array.isArray(patterns) ? patterns : [patterns];
patterns = patterns.map((pattern) => {
if (import_node_process5.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
if (import_node_process4.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
return slash(pattern);
}
return pattern;
});
return patterns;
}
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5.default.cwd(), onProgress = () => {
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process4.default.cwd(), onProgress = () => {
}, ...options } = {}) {
options = {
expandDirectories: false,
@@ -120212,12 +120077,12 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
let deletedCount = 0;
const mapper = async (file) => {
file = import_node_path6.default.resolve(cwd, file);
file = import_node_path5.default.resolve(cwd, file);
if (!force) {
safeCheck(file, cwd);
}
if (!dryRun) {
await import_promises5.default.rm(file, { recursive: true, force: true });
await import_promises4.default.rm(file, { recursive: true, force: true });
}
deletedCount += 1;
onProgress({
@@ -120234,7 +120099,7 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
// node_modules/get-folder-size/index.js
var import_node_path7 = require("node:path");
var import_node_path6 = require("node:path");
// Measure a directory tree rooted at `itemPath` by delegating to the
// sibling `core` walker with error collection enabled.
async function getFolderSize(itemPath, options) {
  return await core(itemPath, options, { errors: true });
}
@@ -120259,7 +120124,7 @@ async function core(rootItemPath, options = {}, returnType = {}) {
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
(directoryItem) => processItem((0, import_node_path7.join)(itemPath, directoryItem))
(directoryItem) => processItem((0, import_node_path6.join)(itemPath, directoryItem))
)
);
}
@@ -123095,16 +122960,14 @@ function getErrorMessage(error2) {
}
async function checkDiskUsage(logger) {
try {
if (process.platform === "darwin" && (process.arch === "arm" || process.arch === "arm64") && !await checkSipEnablement(logger)) {
return void 0;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises4.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE")
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const message = `The Actions runner is running low on disk space (${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = 1024 * 1024 / blockSizeInBytes;
const numBlocksPerGb = 1024 * 1024 * 1024 / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message = `The Actions runner is running low on disk space (${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env["CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */] !== "true") {
logger.warning(message);
} else {
@@ -123113,8 +122976,8 @@ async function checkDiskUsage(logger) {
core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes
};
} catch (error2) {
logger.warning(
@@ -123134,34 +122997,6 @@ function satisfiesGHESVersion(ghesVersion, range, defaultIfInvalid) {
function cloneObject(obj) {
return JSON.parse(JSON.stringify(obj));
}
// Determine whether macOS System Integrity Protection (SIP) is enabled.
// The answer is cached across steps in the IS_SIP_ENABLED environment
// variable; returns `undefined` when the status cannot be determined
// (unexpected `csrutil` output or the command failing to run).
async function checkSipEnablement(logger) {
  // Fast path: reuse a previously exported, well-formed cached value.
  if (process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] !== void 0 && ["true", "false"].includes(process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */])) {
    return process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] === "true";
  }
  try {
    // `csrutil status` reports the SIP state on macOS.
    const sipStatusOutput = await exec.getExecOutput("csrutil status");
    if (sipStatusOutput.exitCode === 0) {
      if (sipStatusOutput.stdout.includes(
        "System Integrity Protection status: enabled."
      )) {
        // Cache the result for subsequent steps of this workflow run.
        core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
        return true;
      }
      if (sipStatusOutput.stdout.includes(
        "System Integrity Protection status: disabled."
      )) {
        core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
        return false;
      }
    }
    // Output did not match either known status line: leave undetermined.
    return void 0;
  } catch (e) {
    logger.warning(
      `Failed to determine if System Integrity Protection was enabled: ${e}`
    );
    return void 0;
  }
}
async function cleanUpGlob(glob2, name, logger) {
logger.debug(`Cleaning up ${name}.`);
try {
@@ -123803,7 +123638,7 @@ function getCliConfigCategoryIfExists(cliError) {
}
function isUnsupportedPlatform() {
return !SUPPORTED_PLATFORMS.some(
([platform2, arch2]) => platform2 === process.platform && arch2 === process.arch
([platform, arch2]) => platform === process.platform && arch2 === process.arch
);
}
function getUnsupportedPlatformError(cliError) {
@@ -125121,17 +124956,17 @@ function getCodeQLBundleExtension(compressionMethod) {
}
function getCodeQLBundleName(compressionMethod) {
const extension = getCodeQLBundleExtension(compressionMethod);
let platform2;
let platform;
if (process.platform === "win32") {
platform2 = "win64";
platform = "win64";
} else if (process.platform === "linux") {
platform2 = "linux64";
platform = "linux64";
} else if (process.platform === "darwin") {
platform2 = "osx64";
platform = "osx64";
} else {
return `codeql-bundle${extension}`;
}
return `codeql-bundle-${platform2}${extension}`;
return `codeql-bundle-${platform}${extension}`;
}
function getCodeQLActionRepository(logger) {
if (isRunningLocalAction()) {
@@ -125165,12 +125000,12 @@ async function getCodeQLBundleDownloadURL(tagName, apiDetails, compressionMethod
}
const [repositoryOwner, repositoryName] = repository.split("/");
try {
const release3 = await getApiClient().rest.repos.getReleaseByTag({
const release2 = await getApiClient().rest.repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: tagName
});
for (const asset of release3.data.assets) {
for (const asset of release2.data.assets) {
if (asset.name === codeQLBundleName) {
logger.info(
`Found CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} with URL ${asset.url}.`
@@ -125610,14 +125445,14 @@ async function getNightlyToolsUrl(logger) {
zstdAvailability.available
) ? "zstd" : "gzip";
try {
const release3 = await getApiClient().rest.repos.listReleases({
const release2 = await getApiClient().rest.repos.listReleases({
owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER,
repo: CODEQL_NIGHTLIES_REPOSITORY_NAME,
per_page: 1,
page: 1,
prerelease: true
});
const latestRelease = release3.data[0];
const latestRelease = release2.data[0];
if (!latestRelease) {
throw new Error("Could not find the latest nightly release.");
}

349
lib/init-action.js generated
View File

@@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({
var require_mock_client = __commonJS({
"node_modules/undici/lib/mock/mock-client.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Client = require_client();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({
var require_mock_pool = __commonJS({
"node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Pool = require_pool();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -18148,7 +18148,7 @@ var require_summary = __commonJS({
exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
var os_1 = require("os");
var fs_1 = require("fs");
var { access: access2, appendFile, writeFile } = fs_1.promises;
var { access, appendFile, writeFile } = fs_1.promises;
exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
var Summary = class {
@@ -18171,7 +18171,7 @@ var require_summary = __commonJS({
throw new Error(`Unable to find environment variable for $${exports2.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access2(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
} catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
@@ -20153,8 +20153,8 @@ var require_semver = __commonJS({
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release3, identifier, identifierBase) {
if (release3.startsWith("pre")) {
inc(release2, identifier, identifierBase) {
if (release2.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
@@ -20165,7 +20165,7 @@ var require_semver = __commonJS({
}
}
}
switch (release3) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -20256,7 +20256,7 @@ var require_semver = __commonJS({
break;
}
default:
throw new Error(`invalid increment argument: ${release3}`);
throw new Error(`invalid increment argument: ${release2}`);
}
this.raw = this.format();
if (this.build.length) {
@@ -20322,7 +20322,7 @@ var require_inc = __commonJS({
"node_modules/semver/functions/inc.js"(exports2, module2) {
"use strict";
var SemVer = require_semver();
var inc = (version, release3, options, identifier, identifierBase) => {
var inc = (version, release2, options, identifier, identifierBase) => {
if (typeof options === "string") {
identifierBase = identifier;
identifier = options;
@@ -20332,7 +20332,7 @@ var require_inc = __commonJS({
return new SemVer(
version instanceof SemVer ? version.version : version,
options
).inc(release3, identifier, identifierBase).version;
).inc(release2, identifier, identifierBase).version;
} catch (er) {
return null;
}
@@ -30507,13 +30507,13 @@ var require_reusify = __commonJS({
current.next = null;
return current;
}
function release3(obj) {
function release2(obj) {
tail.next = obj;
tail = obj;
}
return {
get,
release: release3
release: release2
};
}
module2.exports = reusify;
@@ -30582,7 +30582,7 @@ var require_queue = __commonJS({
self2.paused = false;
for (var i = 0; i < self2.concurrency; i++) {
_running++;
release3();
release2();
}
}
function idle() {
@@ -30591,7 +30591,7 @@ var require_queue = __commonJS({
function push(value, done) {
var current = cache.get();
current.context = context2;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -30611,7 +30611,7 @@ var require_queue = __commonJS({
function unshift(value, done) {
var current = cache.get();
current.context = context2;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -30628,7 +30628,7 @@ var require_queue = __commonJS({
worker.call(context2, current.value, current.worked);
}
}
function release3(holder) {
function release2(holder) {
if (holder) {
cache.release(holder);
}
@@ -32355,7 +32355,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -37445,8 +37444,8 @@ var require_semver3 = __commonJS({
}
} while (++i2);
};
SemVer.prototype.inc = function(release3, identifier) {
switch (release3) {
SemVer.prototype.inc = function(release2, identifier) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -37522,20 +37521,20 @@ var require_semver3 = __commonJS({
}
break;
default:
throw new Error("invalid increment argument: " + release3);
throw new Error("invalid increment argument: " + release2);
}
this.format();
this.raw = this.version;
return this;
};
exports2.inc = inc;
function inc(version, release3, loose, identifier) {
function inc(version, release2, loose, identifier) {
if (typeof loose === "string") {
identifier = loose;
loose = void 0;
}
try {
return new SemVer(version, loose).inc(release3, identifier).version;
return new SemVer(version, loose).inc(release2, identifier).version;
} catch (er) {
return null;
}
@@ -61017,7 +61016,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
};
var access2 = {
var access = {
parameterPath: ["options", "access"],
mapper: {
serializedName: "x-ms-blob-public-access",
@@ -62825,7 +62824,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
requestId,
accept1,
metadata,
access2,
access,
defaultEncryptionScope,
preventEncryptionScopeOverride
],
@@ -62972,7 +62971,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
accept,
version,
requestId,
access2,
access,
leaseId,
ifModifiedSince,
ifUnmodifiedSince
@@ -71818,7 +71817,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
* @param containerAcl - Array of elements each having a unique Id and details of the access policy.
* @param options - Options to Container Set Access Policy operation.
*/
async setAccessPolicy(access3, containerAcl2, options = {}) {
async setAccessPolicy(access2, containerAcl2, options = {}) {
options.conditions = options.conditions || {};
return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
const acl = [];
@@ -71834,7 +71833,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
return assertResponse(await this.containerContext.setAccessPolicy({
abortSignal: options.abortSignal,
access: access3,
access: access2,
containerAcl: acl,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: options.conditions,
@@ -83269,161 +83268,27 @@ var io2 = __toESM(require_io());
// src/util.ts
var fs4 = __toESM(require("fs"));
var fsPromises4 = __toESM(require("fs/promises"));
var os = __toESM(require("os"));
var path5 = __toESM(require("path"));
var core3 = __toESM(require_core());
var exec = __toESM(require_exec());
var io = __toESM(require_io());
// node_modules/check-disk-space/dist/check-disk-space.mjs
var import_node_child_process = require("node:child_process");
var import_promises = require("node:fs/promises");
var import_node_os = require("node:os");
var import_node_path = require("node:path");
var import_node_process = require("node:process");
var import_node_util = require("node:util");
// Error thrown when the supplied path cannot be checked on this platform
// (e.g. a relative Unix path, or a Windows path without a drive letter).
var InvalidPathError = class _InvalidPathError extends Error {
  constructor(message) {
    super(message);
    this.name = "InvalidPathError";
    // Restore the prototype chain so `instanceof` keeps working after the
    // down-leveled `extends Error` transform applied by the bundler.
    Object.setPrototypeOf(this, _InvalidPathError.prototype);
  }
};
// Error thrown when no row of the disk-usage command output matches the
// requested drive or mount point.
var NoMatchError = class _NoMatchError extends Error {
  constructor(message) {
    super(message);
    this.name = "NoMatchError";
    // Restore the prototype chain so `instanceof` keeps working after the
    // down-leveled `extends Error` transform applied by the bundler.
    Object.setPrototypeOf(this, _NoMatchError.prototype);
  }
};
// Report whether `directoryPath` is accessible via the injected `fsAccess`;
// any access failure is treated as "does not exist".
async function isDirectoryExisting(directoryPath, dependencies) {
  try {
    await dependencies.fsAccess(directoryPath);
    return true;
  } catch {
    return false;
  }
}
async function getFirstExistingParentPath(directoryPath, dependencies) {
let parentDirectoryPath = directoryPath;
let parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
while (!parentDirectoryFound) {
parentDirectoryPath = dependencies.pathNormalize(parentDirectoryPath + "/..");
parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
}
return parentDirectoryPath;
}
// Probe for a PowerShell 3+ executable on the PATH. Hosts whose OS release
// has a major version of 6 or below are assumed not to have one.
async function hasPowerShell3(dependencies) {
  const majorRelease = Number.parseInt(dependencies.release.split(".")[0], 10);
  if (majorRelease > 6) {
    try {
      await dependencies.cpExecFile("where", ["powershell"], { windowsHide: true });
      return true;
    } catch {
      // `where` exits non-zero when powershell is not on the PATH.
    }
  }
  return false;
}
// Compute disk usage for the volume containing `directoryPath`, resolving to
// { diskPath, free, size } with `free`/`size` in bytes. Bundled from the
// `check-disk-space` package; all platform facilities are injectable via
// `dependencies` so the parsing logic can be exercised in isolation.
function checkDiskSpace(directoryPath, dependencies = {
  platform: import_node_process.platform,
  release: (0, import_node_os.release)(),
  fsAccess: import_promises.access,
  pathNormalize: import_node_path.normalize,
  pathSep: import_node_path.sep,
  cpExecFile: (0, import_node_util.promisify)(import_node_child_process.execFile)
}) {
  // Parse tabular command output (df / wmic / PowerShell): trim rows, drop
  // the header row, split each remaining row on whitespace, keep the first
  // row accepted by `filter`, and scale the numeric columns by `coefficient`
  // to convert them to bytes.
  function mapOutput(stdout, filter, mapping, coefficient) {
    const parsed = stdout.split("\n").map((line) => line.trim()).filter((line) => line.length !== 0).slice(1).map((line) => line.split(/\s+(?=[\d/])/));
    const filtered = parsed.filter(filter);
    if (filtered.length === 0) {
      // No output row matched the requested drive/mount.
      throw new NoMatchError();
    }
    const diskData = filtered[0];
    return {
      diskPath: diskData[mapping.diskPath],
      free: parseInt(diskData[mapping.free], 10) * coefficient,
      size: parseInt(diskData[mapping.size], 10) * coefficient
    };
  }
  // Run `cmd` (an argv array) and map its stdout through `mapOutput`.
  // Rejects if the command fails or no output row matches.
  async function check(cmd, filter, mapping, coefficient = 1) {
    const [file, ...args] = cmd;
    if (file === void 0) {
      return Promise.reject(new Error("cmd must contain at least one item"));
    }
    try {
      const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
      return mapOutput(stdout, filter, mapping, coefficient);
    } catch (error2) {
      return Promise.reject(error2);
    }
  }
  // Windows: require a drive-letter path (X:\...). Prefer PowerShell's
  // Get-CimInstance; fall back to `wmic` when PowerShell 3+ is unavailable.
  async function checkWin32(directoryPath2) {
    if (directoryPath2.charAt(1) !== ":") {
      return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath2}`));
    }
    const powershellCmd = [
      "powershell",
      "Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size"
    ];
    const wmicCmd = [
      "wmic",
      "logicaldisk",
      "get",
      "size,freespace,caption"
    ];
    const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
    return check(cmd, (driveData) => {
      // Keep the row whose drive letter prefixes the requested path.
      const driveLetter = driveData[0];
      return directoryPath2.toUpperCase().startsWith(driveLetter.toUpperCase());
    }, {
      // Column indices in the Caption/FreeSpace/Size output; values are
      // already in bytes, so the default coefficient of 1 applies.
      diskPath: 0,
      free: 1,
      size: 2
    });
  }
  // Unix: require an absolute path, walk up to the nearest existing parent
  // (df fails on nonexistent paths), then query `df -Pk` (POSIX format,
  // 1024-byte blocks — hence the coefficient of 1024 to convert to bytes).
  async function checkUnix(directoryPath2) {
    if (!dependencies.pathNormalize(directoryPath2).startsWith(dependencies.pathSep)) {
      return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath2}`));
    }
    const pathToCheck = await getFirstExistingParentPath(directoryPath2, dependencies);
    return check(
      [
        "df",
        "-Pk",
        "--",
        pathToCheck
      ],
      () => true,
      // We should only get one line, so we did not need to filter
      {
        // Column indices in `df -P` output: mount point, available, total.
        diskPath: 5,
        free: 3,
        size: 1
      },
      1024
    );
  }
  if (dependencies.platform === "win32") {
    return checkWin32(directoryPath);
  }
  return checkUnix(directoryPath);
}
// node_modules/del/index.js
var import_promises5 = __toESM(require("node:fs/promises"), 1);
var import_node_path6 = __toESM(require("node:path"), 1);
var import_node_process5 = __toESM(require("node:process"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_process4 = __toESM(require("node:process"), 1);
// node_modules/globby/index.js
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_fs3 = __toESM(require("node:fs"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
// node_modules/globby/node_modules/@sindresorhus/merge-streams/index.js
var import_node_events = require("node:events");
var import_node_stream = require("node:stream");
var import_promises2 = require("node:stream/promises");
var import_promises = require("node:stream/promises");
function mergeStreams(streams) {
if (!Array.isArray(streams)) {
throw new TypeError(`Expected an array, got \`${typeof streams}\`.`);
@@ -83498,7 +83363,7 @@ var onMergedStreamFinished = async (passThroughStream, streams) => {
}
};
var onMergedStreamEnd = async (passThroughStream, { signal }) => {
await (0, import_promises2.finished)(passThroughStream, { signal, cleanup: true });
await (0, import_promises.finished)(passThroughStream, { signal, cleanup: true });
};
var onInputStreamsUnpipe = async (passThroughStream, streams, { signal }) => {
for await (const [unpipedStream] of (0, import_node_events.on)(passThroughStream, "unpipe", { signal })) {
@@ -83548,7 +83413,7 @@ var afterMergedStreamFinished = async (onFinished, stream2) => {
};
var onInputStreamEnd = async ({ passThroughStream, stream: stream2, streams, ended, aborted, controller: { signal } }) => {
try {
await (0, import_promises2.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
await (0, import_promises.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
if (streams.has(stream2)) {
ended.add(stream2);
}
@@ -83602,13 +83467,13 @@ var import_fast_glob2 = __toESM(require_out4(), 1);
// node_modules/path-type/index.js
var import_node_fs = __toESM(require("node:fs"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_promises2 = __toESM(require("node:fs/promises"), 1);
async function isType(fsStatType, statsMethodName, filePath) {
if (typeof filePath !== "string") {
throw new TypeError(`Expected a string, got ${typeof filePath}`);
}
try {
const stats = await import_promises3.default[fsStatType](filePath);
const stats = await import_promises2.default[fsStatType](filePath);
return stats[statsMethodName]();
} catch (error2) {
if (error2.code === "ENOENT") {
@@ -83638,20 +83503,20 @@ var isDirectorySync = isTypeSync.bind(void 0, "statSync", "isDirectory");
var isSymlinkSync = isTypeSync.bind(void 0, "lstatSync", "isSymbolicLink");
// node_modules/unicorn-magic/node.js
var import_node_util2 = require("node:util");
var import_node_child_process2 = require("node:child_process");
var import_node_util = require("node:util");
var import_node_child_process = require("node:child_process");
var import_node_url = require("node:url");
var execFileOriginal = (0, import_node_util2.promisify)(import_node_child_process2.execFile);
var execFileOriginal = (0, import_node_util.promisify)(import_node_child_process.execFile);
function toPath(urlOrPath) {
return urlOrPath instanceof URL ? (0, import_node_url.fileURLToPath)(urlOrPath) : urlOrPath;
}
var TEN_MEGABYTES_IN_BYTES = 10 * 1024 * 1024;
// node_modules/globby/ignore.js
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_process = __toESM(require("node:process"), 1);
var import_node_fs2 = __toESM(require("node:fs"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_node_path = __toESM(require("node:path"), 1);
var import_fast_glob = __toESM(require_out4(), 1);
var import_ignore = __toESM(require_ignore(), 1);
@@ -83679,16 +83544,16 @@ var ignoreFilesGlobOptions = {
dot: true
};
var GITIGNORE_FILES_PATTERN = "**/.gitignore";
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path2.default.posix.join(base, pattern.slice(1)) : import_node_path2.default.posix.join(base, pattern);
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path.default.posix.join(base, pattern.slice(1)) : import_node_path.default.posix.join(base, pattern);
var parseIgnoreFile = (file, cwd) => {
const base = slash(import_node_path2.default.relative(cwd, import_node_path2.default.dirname(file.filePath)));
const base = slash(import_node_path.default.relative(cwd, import_node_path.default.dirname(file.filePath)));
return file.content.split(/\r?\n/).filter((line) => line && !line.startsWith("#")).map((pattern) => applyBaseToPattern(pattern, base));
};
var toRelativePath = (fileOrDirectory, cwd) => {
cwd = slash(cwd);
if (import_node_path2.default.isAbsolute(fileOrDirectory)) {
if (import_node_path.default.isAbsolute(fileOrDirectory)) {
if (slash(fileOrDirectory).startsWith(cwd)) {
return import_node_path2.default.relative(cwd, fileOrDirectory);
return import_node_path.default.relative(cwd, fileOrDirectory);
}
throw new Error(`Path ${fileOrDirectory} is not in cwd ${cwd}`);
}
@@ -83704,7 +83569,7 @@ var getIsIgnoredPredicate = (files, cwd) => {
};
};
var normalizeOptions = (options = {}) => ({
cwd: toPath(options.cwd) ?? import_node_process2.default.cwd(),
cwd: toPath(options.cwd) ?? import_node_process.default.cwd(),
suppressErrors: Boolean(options.suppressErrors),
deep: typeof options.deep === "number" ? options.deep : Number.POSITIVE_INFINITY,
ignore: [...options.ignore ?? [], ...defaultIgnoredDirectories]
@@ -83721,7 +83586,7 @@ var isIgnoredByIgnoreFiles = async (patterns, options) => {
const files = await Promise.all(
paths.map(async (filePath) => ({
filePath,
content: await import_promises4.default.readFile(filePath, "utf8")
content: await import_promises3.default.readFile(filePath, "utf8")
}))
);
return getIsIgnoredPredicate(files, cwd);
@@ -83750,14 +83615,14 @@ var assertPatternsInput = (patterns) => {
};
var normalizePathForDirectoryGlob = (filePath, cwd) => {
const path20 = isNegativePattern(filePath) ? filePath.slice(1) : filePath;
return import_node_path3.default.isAbsolute(path20) ? path20 : import_node_path3.default.join(cwd, path20);
return import_node_path2.default.isAbsolute(path20) ? path20 : import_node_path2.default.join(cwd, path20);
};
var getDirectoryGlob = ({ directoryPath, files, extensions }) => {
const extensionGlob = extensions?.length > 0 ? `.${extensions.length > 1 ? `{${extensions.join(",")}}` : extensions[0]}` : "";
return files ? files.map((file) => import_node_path3.default.posix.join(directoryPath, `**/${import_node_path3.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path3.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
return files ? files.map((file) => import_node_path2.default.posix.join(directoryPath, `**/${import_node_path2.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path2.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
};
var directoryToGlob = async (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => {
@@ -83767,7 +83632,7 @@ var directoryToGlob = async (directoryPaths, {
return globs.flat();
};
var directoryToGlobSync = (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => directoryPaths.flatMap((directoryPath) => isDirectorySync(normalizePathForDirectoryGlob(directoryPath, cwd)) ? getDirectoryGlob({ directoryPath, files, extensions }) : directoryPath);
@@ -83825,7 +83690,7 @@ var getFilterSync = (options) => {
var createFilterFunction = (isIgnored) => {
const seen = /* @__PURE__ */ new Set();
return (fastGlobResult) => {
const pathKey = import_node_path3.default.normalize(fastGlobResult.path ?? fastGlobResult);
const pathKey = import_node_path2.default.normalize(fastGlobResult.path ?? fastGlobResult);
if (seen.has(pathKey) || isIgnored && isIgnored(pathKey)) {
return false;
}
@@ -83936,12 +83801,12 @@ var { convertPathToPattern } = import_fast_glob2.default;
var import_is_glob = __toESM(require_is_glob(), 1);
// node_modules/is-path-cwd/index.js
var import_node_process4 = __toESM(require("node:process"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
function isPathCwd(path_) {
let cwd = import_node_process4.default.cwd();
path_ = import_node_path4.default.resolve(path_);
if (import_node_process4.default.platform === "win32") {
let cwd = import_node_process3.default.cwd();
path_ = import_node_path3.default.resolve(path_);
if (import_node_process3.default.platform === "win32") {
cwd = cwd.toLowerCase();
path_ = path_.toLowerCase();
}
@@ -83949,11 +83814,11 @@ function isPathCwd(path_) {
}
// node_modules/del/node_modules/is-path-inside/index.js
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
function isPathInside(childPath, parentPath) {
const relation = import_node_path5.default.relative(parentPath, childPath);
const relation = import_node_path4.default.relative(parentPath, childPath);
return Boolean(
relation && relation !== ".." && !relation.startsWith(`..${import_node_path5.default.sep}`) && relation !== import_node_path5.default.resolve(childPath)
relation && relation !== ".." && !relation.startsWith(`..${import_node_path4.default.sep}`) && relation !== import_node_path4.default.resolve(childPath)
);
}
@@ -84092,14 +83957,14 @@ function safeCheck(file, cwd) {
function normalizePatterns(patterns) {
patterns = Array.isArray(patterns) ? patterns : [patterns];
patterns = patterns.map((pattern) => {
if (import_node_process5.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
if (import_node_process4.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
return slash(pattern);
}
return pattern;
});
return patterns;
}
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5.default.cwd(), onProgress = () => {
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process4.default.cwd(), onProgress = () => {
}, ...options } = {}) {
options = {
expandDirectories: false,
@@ -84120,12 +83985,12 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
let deletedCount = 0;
const mapper = async (file) => {
file = import_node_path6.default.resolve(cwd, file);
file = import_node_path5.default.resolve(cwd, file);
if (!force) {
safeCheck(file, cwd);
}
if (!dryRun) {
await import_promises5.default.rm(file, { recursive: true, force: true });
await import_promises4.default.rm(file, { recursive: true, force: true });
}
deletedCount += 1;
onProgress({
@@ -84142,7 +84007,7 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
// node_modules/get-folder-size/index.js
var import_node_path7 = require("node:path");
var import_node_path6 = require("node:path");
async function getFolderSize(itemPath, options) {
return await core(itemPath, options, { errors: true });
}
@@ -84167,7 +84032,7 @@ async function core(rootItemPath, options = {}, returnType = {}) {
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
(directoryItem) => processItem((0, import_node_path7.join)(itemPath, directoryItem))
(directoryItem) => processItem((0, import_node_path6.join)(itemPath, directoryItem))
)
);
}
@@ -86809,8 +86674,8 @@ function getExtraOptionsEnvParam() {
);
}
}
function getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, platform3) {
const fixedAmount = 1024 * (platform3 === "win32" ? 1.5 : 1);
function getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, platform2) {
const fixedAmount = 1024 * (platform2 === "win32" ? 1.5 : 1);
const scaledAmount = getReservedRamScaleFactor() * Math.max(totalMemoryMegaBytes - 8 * 1024, 0);
return fixedAmount + scaledAmount;
}
@@ -86824,7 +86689,7 @@ function getReservedRamScaleFactor() {
}
return envVar / 100;
}
function getMemoryFlagValueForPlatform(userInput, totalMemoryBytes, platform3) {
function getMemoryFlagValueForPlatform(userInput, totalMemoryBytes, platform2) {
let memoryToUseMegaBytes;
if (userInput) {
memoryToUseMegaBytes = Number(userInput);
@@ -86837,7 +86702,7 @@ function getMemoryFlagValueForPlatform(userInput, totalMemoryBytes, platform3) {
const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
const reservedMemoryMegaBytes = getSystemReservedMemoryMegaBytes(
totalMemoryMegaBytes,
platform3
platform2
);
memoryToUseMegaBytes = totalMemoryMegaBytes - reservedMemoryMegaBytes;
}
@@ -87204,16 +87069,14 @@ function prettyPrintPack(pack) {
}
async function checkDiskUsage(logger) {
try {
if (process.platform === "darwin" && (process.arch === "arm" || process.arch === "arm64") && !await checkSipEnablement(logger)) {
return void 0;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises4.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE")
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const message = `The Actions runner is running low on disk space (${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = 1024 * 1024 / blockSizeInBytes;
const numBlocksPerGb = 1024 * 1024 * 1024 / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message = `The Actions runner is running low on disk space (${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env["CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */] !== "true") {
logger.warning(message);
} else {
@@ -87222,8 +87085,8 @@ async function checkDiskUsage(logger) {
core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes
};
} catch (error2) {
logger.warning(
@@ -87263,34 +87126,6 @@ var BuildMode = /* @__PURE__ */ ((BuildMode3) => {
function cloneObject(obj) {
return JSON.parse(JSON.stringify(obj));
}
async function checkSipEnablement(logger) {
if (process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] !== void 0 && ["true", "false"].includes(process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */])) {
return process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] === "true";
}
try {
const sipStatusOutput = await exec.getExecOutput("csrutil status");
if (sipStatusOutput.exitCode === 0) {
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: enabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
return true;
}
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: disabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
return false;
}
}
return void 0;
} catch (e) {
logger.warning(
`Failed to determine if System Integrity Protection was enabled: ${e}`
);
return void 0;
}
}
async function cleanUpGlob(glob2, name, logger) {
logger.debug(`Cleaning up ${name}.`);
try {
@@ -90151,7 +89986,7 @@ function getCliConfigCategoryIfExists(cliError) {
}
function isUnsupportedPlatform() {
return !SUPPORTED_PLATFORMS.some(
([platform3, arch2]) => platform3 === process.platform && arch2 === process.arch
([platform2, arch2]) => platform2 === process.platform && arch2 === process.arch
);
}
function getUnsupportedPlatformError(cliError) {
@@ -90513,17 +90348,17 @@ function getCodeQLBundleExtension(compressionMethod) {
}
function getCodeQLBundleName(compressionMethod) {
const extension = getCodeQLBundleExtension(compressionMethod);
let platform3;
let platform2;
if (process.platform === "win32") {
platform3 = "win64";
platform2 = "win64";
} else if (process.platform === "linux") {
platform3 = "linux64";
platform2 = "linux64";
} else if (process.platform === "darwin") {
platform3 = "osx64";
platform2 = "osx64";
} else {
return `codeql-bundle${extension}`;
}
return `codeql-bundle-${platform3}${extension}`;
return `codeql-bundle-${platform2}${extension}`;
}
function getCodeQLActionRepository(logger) {
if (isRunningLocalAction()) {
@@ -90557,12 +90392,12 @@ async function getCodeQLBundleDownloadURL(tagName, apiDetails, compressionMethod
}
const [repositoryOwner, repositoryName] = repository.split("/");
try {
const release3 = await getApiClient().rest.repos.getReleaseByTag({
const release2 = await getApiClient().rest.repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: tagName
});
for (const asset of release3.data.assets) {
for (const asset of release2.data.assets) {
if (asset.name === codeQLBundleName) {
logger.info(
`Found CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} with URL ${asset.url}.`
@@ -91002,14 +90837,14 @@ async function getNightlyToolsUrl(logger) {
zstdAvailability.available
) ? "zstd" : "gzip";
try {
const release3 = await getApiClient().rest.repos.listReleases({
const release2 = await getApiClient().rest.repos.listReleases({
owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER,
repo: CODEQL_NIGHTLIES_REPOSITORY_NAME,
per_page: 1,
page: 1,
prerelease: true
});
const latestRelease = release3.data[0];
const latestRelease = release2.data[0];
if (!latestRelease) {
throw new Error("Could not find the latest nightly release.");
}

View File

@@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({
var require_mock_client = __commonJS({
"node_modules/undici/lib/mock/mock-client.js"(exports2, module2) {
"use strict";
var { promisify: promisify2 } = require("util");
var { promisify } = require("util");
var Client = require_client();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify2(this[kOriginalClose])();
await promisify(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({
var require_mock_pool = __commonJS({
"node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) {
"use strict";
var { promisify: promisify2 } = require("util");
var { promisify } = require("util");
var Pool = require_pool();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify2(this[kOriginalClose])();
await promisify(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -18148,7 +18148,7 @@ var require_summary = __commonJS({
exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
var os_1 = require("os");
var fs_1 = require("fs");
var { access: access2, appendFile, writeFile } = fs_1.promises;
var { access, appendFile, writeFile } = fs_1.promises;
exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
var Summary = class {
@@ -18171,7 +18171,7 @@ var require_summary = __commonJS({
throw new Error(`Unable to find environment variable for $${exports2.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access2(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
} catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
@@ -24870,8 +24870,8 @@ var require_semver = __commonJS({
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release3, identifier, identifierBase) {
if (release3.startsWith("pre")) {
inc(release2, identifier, identifierBase) {
if (release2.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
@@ -24882,7 +24882,7 @@ var require_semver = __commonJS({
}
}
}
switch (release3) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -24973,7 +24973,7 @@ var require_semver = __commonJS({
break;
}
default:
throw new Error(`invalid increment argument: ${release3}`);
throw new Error(`invalid increment argument: ${release2}`);
}
this.raw = this.format();
if (this.build.length) {
@@ -25039,7 +25039,7 @@ var require_inc = __commonJS({
"node_modules/semver/functions/inc.js"(exports2, module2) {
"use strict";
var SemVer = require_semver();
var inc = (version, release3, options, identifier, identifierBase) => {
var inc = (version, release2, options, identifier, identifierBase) => {
if (typeof options === "string") {
identifierBase = identifier;
identifier = options;
@@ -25049,7 +25049,7 @@ var require_inc = __commonJS({
return new SemVer(
version instanceof SemVer ? version.version : version,
options
).inc(release3, identifier, identifierBase).version;
).inc(release2, identifier, identifierBase).version;
} catch (er) {
return null;
}
@@ -26506,7 +26506,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -31445,8 +31444,8 @@ var require_semver3 = __commonJS({
}
} while (++i2);
};
SemVer.prototype.inc = function(release3, identifier) {
switch (release3) {
SemVer.prototype.inc = function(release2, identifier) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -31522,20 +31521,20 @@ var require_semver3 = __commonJS({
}
break;
default:
throw new Error("invalid increment argument: " + release3);
throw new Error("invalid increment argument: " + release2);
}
this.format();
this.raw = this.version;
return this;
};
exports2.inc = inc;
function inc(version, release3, loose, identifier) {
function inc(version, release2, loose, identifier) {
if (typeof loose === "string") {
identifier = loose;
loose = void 0;
}
try {
return new SemVer(version, loose).inc(release3, identifier).version;
return new SemVer(version, loose).inc(release2, identifier).version;
} catch (er) {
return null;
}
@@ -55017,7 +55016,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
};
var access2 = {
var access = {
parameterPath: ["options", "access"],
mapper: {
serializedName: "x-ms-blob-public-access",
@@ -56825,7 +56824,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
requestId,
accept1,
metadata,
access2,
access,
defaultEncryptionScope,
preventEncryptionScopeOverride
],
@@ -56972,7 +56971,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
accept,
version,
requestId,
access2,
access,
leaseId,
ifModifiedSince,
ifUnmodifiedSince
@@ -65818,7 +65817,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
* @param containerAcl - Array of elements each having a unique Id and details of the access policy.
* @param options - Options to Container Set Access Policy operation.
*/
async setAccessPolicy(access3, containerAcl2, options = {}) {
async setAccessPolicy(access2, containerAcl2, options = {}) {
options.conditions = options.conditions || {};
return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
const acl = [];
@@ -65834,7 +65833,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
return assertResponse(await this.containerContext.setAccessPolicy({
abortSignal: options.abortSignal,
access: access3,
access: access2,
containerAcl: acl,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: options.conditions,
@@ -76112,148 +76111,14 @@ var github = __toESM(require_github());
var io2 = __toESM(require_io());
// src/util.ts
var fsPromises = __toESM(require("fs/promises"));
var path = __toESM(require("path"));
var core3 = __toESM(require_core());
var exec = __toESM(require_exec());
var io = __toESM(require_io());
// node_modules/check-disk-space/dist/check-disk-space.mjs
var import_node_child_process = require("node:child_process");
var import_promises = require("node:fs/promises");
var import_node_os = require("node:os");
var import_node_path = require("node:path");
var import_node_process = require("node:process");
var import_node_util = require("node:util");
var InvalidPathError = class _InvalidPathError extends Error {
constructor(message) {
super(message);
this.name = "InvalidPathError";
Object.setPrototypeOf(this, _InvalidPathError.prototype);
}
};
var NoMatchError = class _NoMatchError extends Error {
constructor(message) {
super(message);
this.name = "NoMatchError";
Object.setPrototypeOf(this, _NoMatchError.prototype);
}
};
async function isDirectoryExisting(directoryPath, dependencies) {
try {
await dependencies.fsAccess(directoryPath);
return Promise.resolve(true);
} catch (error2) {
return Promise.resolve(false);
}
}
async function getFirstExistingParentPath(directoryPath, dependencies) {
let parentDirectoryPath = directoryPath;
let parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
while (!parentDirectoryFound) {
parentDirectoryPath = dependencies.pathNormalize(parentDirectoryPath + "/..");
parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
}
return parentDirectoryPath;
}
async function hasPowerShell3(dependencies) {
const major = parseInt(dependencies.release.split(".")[0], 10);
if (major <= 6) {
return false;
}
try {
await dependencies.cpExecFile("where", ["powershell"], { windowsHide: true });
return true;
} catch (error2) {
return false;
}
}
function checkDiskSpace(directoryPath, dependencies = {
platform: import_node_process.platform,
release: (0, import_node_os.release)(),
fsAccess: import_promises.access,
pathNormalize: import_node_path.normalize,
pathSep: import_node_path.sep,
cpExecFile: (0, import_node_util.promisify)(import_node_child_process.execFile)
}) {
function mapOutput(stdout, filter, mapping, coefficient) {
const parsed = stdout.split("\n").map((line) => line.trim()).filter((line) => line.length !== 0).slice(1).map((line) => line.split(/\s+(?=[\d/])/));
const filtered = parsed.filter(filter);
if (filtered.length === 0) {
throw new NoMatchError();
}
const diskData = filtered[0];
return {
diskPath: diskData[mapping.diskPath],
free: parseInt(diskData[mapping.free], 10) * coefficient,
size: parseInt(diskData[mapping.size], 10) * coefficient
};
}
async function check(cmd, filter, mapping, coefficient = 1) {
const [file, ...args] = cmd;
if (file === void 0) {
return Promise.reject(new Error("cmd must contain at least one item"));
}
try {
const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
return mapOutput(stdout, filter, mapping, coefficient);
} catch (error2) {
return Promise.reject(error2);
}
}
async function checkWin32(directoryPath2) {
if (directoryPath2.charAt(1) !== ":") {
return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath2}`));
}
const powershellCmd = [
"powershell",
"Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size"
];
const wmicCmd = [
"wmic",
"logicaldisk",
"get",
"size,freespace,caption"
];
const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
return check(cmd, (driveData) => {
const driveLetter = driveData[0];
return directoryPath2.toUpperCase().startsWith(driveLetter.toUpperCase());
}, {
diskPath: 0,
free: 1,
size: 2
});
}
async function checkUnix(directoryPath2) {
if (!dependencies.pathNormalize(directoryPath2).startsWith(dependencies.pathSep)) {
return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath2}`));
}
const pathToCheck = await getFirstExistingParentPath(directoryPath2, dependencies);
return check(
[
"df",
"-Pk",
"--",
pathToCheck
],
() => true,
// We should only get one line, so we did not need to filter
{
diskPath: 5,
free: 3,
size: 1
},
1024
);
}
if (dependencies.platform === "win32") {
return checkWin32(directoryPath);
}
return checkUnix(directoryPath);
}
// node_modules/get-folder-size/index.js
var import_node_path2 = require("node:path");
var import_node_path = require("node:path");
async function getFolderSize(itemPath, options) {
return await core(itemPath, options, { errors: true });
}
@@ -76278,7 +76143,7 @@ async function core(rootItemPath, options = {}, returnType = {}) {
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
(directoryItem) => processItem((0, import_node_path2.join)(itemPath, directoryItem))
(directoryItem) => processItem((0, import_node_path.join)(itemPath, directoryItem))
)
);
}
@@ -79059,16 +78924,14 @@ function getErrorMessage(error2) {
}
async function checkDiskUsage(logger) {
try {
if (process.platform === "darwin" && (process.arch === "arm" || process.arch === "arm64") && !await checkSipEnablement(logger)) {
return void 0;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE")
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const message = `The Actions runner is running low on disk space (${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = 1024 * 1024 / blockSizeInBytes;
const numBlocksPerGb = 1024 * 1024 * 1024 / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message = `The Actions runner is running low on disk space (${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env["CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */] !== "true") {
logger.warning(message);
} else {
@@ -79077,8 +78940,8 @@ async function checkDiskUsage(logger) {
core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes
};
} catch (error2) {
logger.warning(
@@ -79104,34 +78967,6 @@ function checkActionVersion(version, githubVersion) {
function cloneObject(obj) {
return JSON.parse(JSON.stringify(obj));
}
async function checkSipEnablement(logger) {
if (process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] !== void 0 && ["true", "false"].includes(process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */])) {
return process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] === "true";
}
try {
const sipStatusOutput = await exec.getExecOutput("csrutil status");
if (sipStatusOutput.exitCode === 0) {
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: enabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
return true;
}
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: disabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
return false;
}
}
return void 0;
} catch (e) {
logger.warning(
`Failed to determine if System Integrity Protection was enabled: ${e}`
);
return void 0;
}
}
async function asyncSome(array, predicate) {
const results = await Promise.all(array.map(predicate));
return results.some((result) => result);
@@ -79586,7 +79421,7 @@ function getCliConfigCategoryIfExists(cliError) {
}
function isUnsupportedPlatform() {
return !SUPPORTED_PLATFORMS.some(
([platform2, arch]) => platform2 === process.platform && arch === process.arch
([platform, arch]) => platform === process.platform && arch === process.arch
);
}
function getUnsupportedPlatformError(cliError) {

View File

@@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({
var require_mock_client = __commonJS({
"node_modules/undici/lib/mock/mock-client.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Client = require_client();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({
var require_mock_pool = __commonJS({
"node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Pool = require_pool();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -18148,7 +18148,7 @@ var require_summary = __commonJS({
exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
var os_1 = require("os");
var fs_1 = require("fs");
var { access: access2, appendFile, writeFile } = fs_1.promises;
var { access, appendFile, writeFile } = fs_1.promises;
exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
var Summary = class {
@@ -18171,7 +18171,7 @@ var require_summary = __commonJS({
throw new Error(`Unable to find environment variable for $${exports2.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access2(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
} catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
@@ -28577,13 +28577,13 @@ var require_reusify = __commonJS({
current.next = null;
return current;
}
function release3(obj) {
function release2(obj) {
tail.next = obj;
tail = obj;
}
return {
get,
release: release3
release: release2
};
}
module2.exports = reusify;
@@ -28652,7 +28652,7 @@ var require_queue = __commonJS({
self2.paused = false;
for (var i = 0; i < self2.concurrency; i++) {
_running++;
release3();
release2();
}
}
function idle() {
@@ -28661,7 +28661,7 @@ var require_queue = __commonJS({
function push(value, done) {
var current = cache.get();
current.context = context2;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -28681,7 +28681,7 @@ var require_queue = __commonJS({
function unshift(value, done) {
var current = cache.get();
current.context = context2;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -28698,7 +28698,7 @@ var require_queue = __commonJS({
worker.call(context2, current.value, current.worked);
}
}
function release3(holder) {
function release2(holder) {
if (holder) {
cache.release(holder);
}
@@ -30719,8 +30719,8 @@ var require_semver = __commonJS({
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release3, identifier, identifierBase) {
if (release3.startsWith("pre")) {
inc(release2, identifier, identifierBase) {
if (release2.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
@@ -30731,7 +30731,7 @@ var require_semver = __commonJS({
}
}
}
switch (release3) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -30822,7 +30822,7 @@ var require_semver = __commonJS({
break;
}
default:
throw new Error(`invalid increment argument: ${release3}`);
throw new Error(`invalid increment argument: ${release2}`);
}
this.raw = this.format();
if (this.build.length) {
@@ -30888,7 +30888,7 @@ var require_inc = __commonJS({
"node_modules/semver/functions/inc.js"(exports2, module2) {
"use strict";
var SemVer = require_semver();
var inc = (version, release3, options, identifier, identifierBase) => {
var inc = (version, release2, options, identifier, identifierBase) => {
if (typeof options === "string") {
identifierBase = identifier;
identifier = options;
@@ -30898,7 +30898,7 @@ var require_inc = __commonJS({
return new SemVer(
version instanceof SemVer ? version.version : version,
options
).inc(release3, identifier, identifierBase).version;
).inc(release2, identifier, identifierBase).version;
} catch (er) {
return null;
}
@@ -32355,7 +32355,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -35997,8 +35996,8 @@ var require_semver3 = __commonJS({
}
} while (++i2);
};
SemVer.prototype.inc = function(release3, identifier) {
switch (release3) {
SemVer.prototype.inc = function(release2, identifier) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -36074,20 +36073,20 @@ var require_semver3 = __commonJS({
}
break;
default:
throw new Error("invalid increment argument: " + release3);
throw new Error("invalid increment argument: " + release2);
}
this.format();
this.raw = this.version;
return this;
};
exports2.inc = inc;
function inc(version, release3, loose, identifier) {
function inc(version, release2, loose, identifier) {
if (typeof loose === "string") {
identifier = loose;
loose = void 0;
}
try {
return new SemVer(version, loose).inc(release3, identifier).version;
return new SemVer(version, loose).inc(release2, identifier).version;
} catch (er) {
return null;
}
@@ -59569,7 +59568,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
};
var access2 = {
var access = {
parameterPath: ["options", "access"],
mapper: {
serializedName: "x-ms-blob-public-access",
@@ -61377,7 +61376,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
requestId,
accept1,
metadata,
access2,
access,
defaultEncryptionScope,
preventEncryptionScopeOverride
],
@@ -61524,7 +61523,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
accept,
version,
requestId,
access2,
access,
leaseId,
ifModifiedSince,
ifUnmodifiedSince
@@ -70370,7 +70369,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
* @param containerAcl - Array of elements each having a unique Id and details of the access policy.
* @param options - Options to Container Set Access Policy operation.
*/
async setAccessPolicy(access3, containerAcl2, options = {}) {
async setAccessPolicy(access2, containerAcl2, options = {}) {
options.conditions = options.conditions || {};
return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
const acl = [];
@@ -70386,7 +70385,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
return assertResponse(await this.containerContext.setAccessPolicy({
abortSignal: options.abortSignal,
access: access3,
access: access2,
containerAcl: acl,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: options.conditions,
@@ -82016,160 +82015,26 @@ var github = __toESM(require_github());
var io2 = __toESM(require_io());
// src/util.ts
var fsPromises4 = __toESM(require("fs/promises"));
var path5 = __toESM(require("path"));
var core3 = __toESM(require_core());
var exec = __toESM(require_exec());
var io = __toESM(require_io());
// node_modules/check-disk-space/dist/check-disk-space.mjs
var import_node_child_process = require("node:child_process");
var import_promises = require("node:fs/promises");
var import_node_os = require("node:os");
var import_node_path = require("node:path");
var import_node_process = require("node:process");
var import_node_util = require("node:util");
// Error thrown when a caller passes a path that cannot be checked on the
// current platform (wrong drive syntax on Windows, non-absolute on Unix).
var InvalidPathError = class _InvalidPathError extends Error {
  constructor(message) {
    super(message);
    // Restore the prototype chain, which transpiled Error subclasses lose.
    Object.setPrototypeOf(this, _InvalidPathError.prototype);
    this.name = "InvalidPathError";
  }
};
// Error thrown when the disk-listing command produced output, but no row
// matched the requested path's drive/mount filter.
var NoMatchError = class _NoMatchError extends Error {
  constructor(message) {
    super(message);
    // Restore the prototype chain, which transpiled Error subclasses lose.
    Object.setPrototypeOf(this, _NoMatchError.prototype);
    this.name = "NoMatchError";
  }
};
// Returns true when `directoryPath` is accessible via the injected
// `dependencies.fsAccess`, and false if the access probe rejects for any
// reason (typically because the path does not exist).
async function isDirectoryExisting(directoryPath, dependencies) {
  try {
    await dependencies.fsAccess(directoryPath);
  } catch {
    return false;
  }
  return true;
}
// Walks upward from `directoryPath` — appending "/.." and normalizing via
// the injected `dependencies.pathNormalize` — until an existing ancestor is
// found, then returns that ancestor. The walk stops at the filesystem root,
// which is assumed to be accessible.
async function getFirstExistingParentPath(directoryPath, dependencies) {
  let candidate = directoryPath;
  while (!(await isDirectoryExisting(candidate, dependencies))) {
    candidate = dependencies.pathNormalize(candidate + "/..");
  }
  return candidate;
}
// Reports whether PowerShell 3+ appears usable on this machine. OS release
// strings with a major version of 6 or lower are rejected outright; newer
// releases are probed with `where powershell`, whose non-zero exit (thrown
// by the injected `cpExecFile`) means the binary could not be located.
async function hasPowerShell3(dependencies) {
  const majorRelease = parseInt(dependencies.release.split(".")[0], 10);
  if (majorRelease <= 6) {
    return false;
  }
  try {
    await dependencies.cpExecFile("where", ["powershell"], { windowsHide: true });
  } catch {
    return false;
  }
  return true;
}
// Resolves the disk path, free bytes, and total bytes for the volume that
// contains `directoryPath`. All OS interaction goes through the injectable
// `dependencies` object (defaults shell out via child_process.execFile);
// on Windows it lists logical disks with PowerShell or wmic, elsewhere it
// runs `df -Pk` against the nearest existing ancestor of the path.
function checkDiskSpace(directoryPath, dependencies = {
  platform: import_node_process.platform,
  release: (0, import_node_os.release)(),
  fsAccess: import_promises.access,
  pathNormalize: import_node_path.normalize,
  pathSep: import_node_path.sep,
  cpExecFile: (0, import_node_util.promisify)(import_node_child_process.execFile)
}) {
  // Parses tabular command output: trims lines, drops empties and the
  // header row, then splits columns on whitespace followed by a digit or
  // "/" (so multi-word labels stay in one column). The first row accepted
  // by `filter` is mapped into { diskPath, free, size }, with the numeric
  // columns scaled by `coefficient`. Throws NoMatchError if no row matches.
  function mapOutput(stdout, filter, mapping, coefficient) {
    const parsed = stdout.split("\n").map((line) => line.trim()).filter((line) => line.length !== 0).slice(1).map((line) => line.split(/\s+(?=[\d/])/));
    const filtered = parsed.filter(filter);
    if (filtered.length === 0) {
      throw new NoMatchError();
    }
    const diskData = filtered[0];
    return {
      diskPath: diskData[mapping.diskPath],
      free: parseInt(diskData[mapping.free], 10) * coefficient,
      size: parseInt(diskData[mapping.size], 10) * coefficient
    };
  }
  // Runs `cmd` (argv-style array) via the injected cpExecFile and maps its
  // stdout. Rejects on an empty command, a command failure, or mapOutput's
  // NoMatchError.
  async function check(cmd, filter, mapping, coefficient = 1) {
    const [file, ...args] = cmd;
    if (file === void 0) {
      return Promise.reject(new Error("cmd must contain at least one item"));
    }
    try {
      const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
      return mapOutput(stdout, filter, mapping, coefficient);
    } catch (error2) {
      return Promise.reject(error2);
    }
  }
  // Windows strategy: the path must look like "X:\..."; lists all logical
  // disks (PowerShell when available, wmic otherwise) and keeps the row
  // whose drive letter prefixes the requested path (case-insensitive).
  async function checkWin32(directoryPath2) {
    if (directoryPath2.charAt(1) !== ":") {
      return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath2}`));
    }
    const powershellCmd = [
      "powershell",
      "Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size"
    ];
    const wmicCmd = [
      "wmic",
      "logicaldisk",
      "get",
      "size,freespace,caption"
    ];
    const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
    return check(cmd, (driveData) => {
      const driveLetter = driveData[0];
      return directoryPath2.toUpperCase().startsWith(driveLetter.toUpperCase());
    }, {
      // Column indices in the disk listing: caption, free space, size.
      diskPath: 0,
      free: 1,
      size: 2
    });
  }
  // Unix strategy: the path must be absolute; runs `df -Pk` on the nearest
  // existing ancestor (the target itself may not exist yet). The 1024
  // coefficient converts df's KiB columns to bytes.
  async function checkUnix(directoryPath2) {
    if (!dependencies.pathNormalize(directoryPath2).startsWith(dependencies.pathSep)) {
      return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath2}`));
    }
    const pathToCheck = await getFirstExistingParentPath(directoryPath2, dependencies);
    return check(
      [
        "df",
        "-Pk",
        "--",
        pathToCheck
      ],
      () => true,
      // We should only get one line, so we did not need to filter
      {
        // Column indices in `df -Pk` output: mount point, available, size.
        diskPath: 5,
        free: 3,
        size: 1
      },
      1024
    );
  }
  if (dependencies.platform === "win32") {
    return checkWin32(directoryPath);
  }
  return checkUnix(directoryPath);
}
// node_modules/del/index.js
var import_promises5 = __toESM(require("node:fs/promises"), 1);
var import_node_path6 = __toESM(require("node:path"), 1);
var import_node_process5 = __toESM(require("node:process"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_process4 = __toESM(require("node:process"), 1);
// node_modules/globby/index.js
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_fs3 = __toESM(require("node:fs"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
// node_modules/globby/node_modules/@sindresorhus/merge-streams/index.js
var import_node_events = require("node:events");
var import_node_stream = require("node:stream");
var import_promises2 = require("node:stream/promises");
var import_promises = require("node:stream/promises");
function mergeStreams(streams) {
if (!Array.isArray(streams)) {
throw new TypeError(`Expected an array, got \`${typeof streams}\`.`);
@@ -82244,7 +82109,7 @@ var onMergedStreamFinished = async (passThroughStream, streams) => {
}
};
var onMergedStreamEnd = async (passThroughStream, { signal }) => {
await (0, import_promises2.finished)(passThroughStream, { signal, cleanup: true });
await (0, import_promises.finished)(passThroughStream, { signal, cleanup: true });
};
var onInputStreamsUnpipe = async (passThroughStream, streams, { signal }) => {
for await (const [unpipedStream] of (0, import_node_events.on)(passThroughStream, "unpipe", { signal })) {
@@ -82294,7 +82159,7 @@ var afterMergedStreamFinished = async (onFinished, stream2) => {
};
var onInputStreamEnd = async ({ passThroughStream, stream: stream2, streams, ended, aborted, controller: { signal } }) => {
try {
await (0, import_promises2.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
await (0, import_promises.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
if (streams.has(stream2)) {
ended.add(stream2);
}
@@ -82348,13 +82213,13 @@ var import_fast_glob2 = __toESM(require_out4(), 1);
// node_modules/path-type/index.js
var import_node_fs = __toESM(require("node:fs"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_promises2 = __toESM(require("node:fs/promises"), 1);
async function isType(fsStatType, statsMethodName, filePath) {
if (typeof filePath !== "string") {
throw new TypeError(`Expected a string, got ${typeof filePath}`);
}
try {
const stats = await import_promises3.default[fsStatType](filePath);
const stats = await import_promises2.default[fsStatType](filePath);
return stats[statsMethodName]();
} catch (error2) {
if (error2.code === "ENOENT") {
@@ -82384,20 +82249,20 @@ var isDirectorySync = isTypeSync.bind(void 0, "statSync", "isDirectory");
var isSymlinkSync = isTypeSync.bind(void 0, "lstatSync", "isSymbolicLink");
// node_modules/unicorn-magic/node.js
var import_node_util2 = require("node:util");
var import_node_child_process2 = require("node:child_process");
var import_node_util = require("node:util");
var import_node_child_process = require("node:child_process");
var import_node_url = require("node:url");
var execFileOriginal = (0, import_node_util2.promisify)(import_node_child_process2.execFile);
var execFileOriginal = (0, import_node_util.promisify)(import_node_child_process.execFile);
// Normalizes a location argument: file URLs are converted to filesystem
// paths, while plain strings pass through unchanged.
function toPath(urlOrPath) {
  if (urlOrPath instanceof URL) {
    return (0, import_node_url.fileURLToPath)(urlOrPath);
  }
  return urlOrPath;
}
var TEN_MEGABYTES_IN_BYTES = 10 * 1024 * 1024;
// node_modules/globby/ignore.js
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_process = __toESM(require("node:process"), 1);
var import_node_fs2 = __toESM(require("node:fs"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_node_path = __toESM(require("node:path"), 1);
var import_fast_glob = __toESM(require_out4(), 1);
var import_ignore = __toESM(require_ignore(), 1);
@@ -82425,16 +82290,16 @@ var ignoreFilesGlobOptions = {
dot: true
};
var GITIGNORE_FILES_PATTERN = "**/.gitignore";
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path2.default.posix.join(base, pattern.slice(1)) : import_node_path2.default.posix.join(base, pattern);
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path.default.posix.join(base, pattern.slice(1)) : import_node_path.default.posix.join(base, pattern);
var parseIgnoreFile = (file, cwd) => {
const base = slash(import_node_path2.default.relative(cwd, import_node_path2.default.dirname(file.filePath)));
const base = slash(import_node_path.default.relative(cwd, import_node_path.default.dirname(file.filePath)));
return file.content.split(/\r?\n/).filter((line) => line && !line.startsWith("#")).map((pattern) => applyBaseToPattern(pattern, base));
};
var toRelativePath = (fileOrDirectory, cwd) => {
cwd = slash(cwd);
if (import_node_path2.default.isAbsolute(fileOrDirectory)) {
if (import_node_path.default.isAbsolute(fileOrDirectory)) {
if (slash(fileOrDirectory).startsWith(cwd)) {
return import_node_path2.default.relative(cwd, fileOrDirectory);
return import_node_path.default.relative(cwd, fileOrDirectory);
}
throw new Error(`Path ${fileOrDirectory} is not in cwd ${cwd}`);
}
@@ -82450,7 +82315,7 @@ var getIsIgnoredPredicate = (files, cwd) => {
};
};
var normalizeOptions = (options = {}) => ({
cwd: toPath(options.cwd) ?? import_node_process2.default.cwd(),
cwd: toPath(options.cwd) ?? import_node_process.default.cwd(),
suppressErrors: Boolean(options.suppressErrors),
deep: typeof options.deep === "number" ? options.deep : Number.POSITIVE_INFINITY,
ignore: [...options.ignore ?? [], ...defaultIgnoredDirectories]
@@ -82467,7 +82332,7 @@ var isIgnoredByIgnoreFiles = async (patterns, options) => {
const files = await Promise.all(
paths.map(async (filePath) => ({
filePath,
content: await import_promises4.default.readFile(filePath, "utf8")
content: await import_promises3.default.readFile(filePath, "utf8")
}))
);
return getIsIgnoredPredicate(files, cwd);
@@ -82496,14 +82361,14 @@ var assertPatternsInput = (patterns) => {
};
var normalizePathForDirectoryGlob = (filePath, cwd) => {
const path12 = isNegativePattern(filePath) ? filePath.slice(1) : filePath;
return import_node_path3.default.isAbsolute(path12) ? path12 : import_node_path3.default.join(cwd, path12);
return import_node_path2.default.isAbsolute(path12) ? path12 : import_node_path2.default.join(cwd, path12);
};
var getDirectoryGlob = ({ directoryPath, files, extensions }) => {
const extensionGlob = extensions?.length > 0 ? `.${extensions.length > 1 ? `{${extensions.join(",")}}` : extensions[0]}` : "";
return files ? files.map((file) => import_node_path3.default.posix.join(directoryPath, `**/${import_node_path3.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path3.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
return files ? files.map((file) => import_node_path2.default.posix.join(directoryPath, `**/${import_node_path2.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path2.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
};
var directoryToGlob = async (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => {
@@ -82513,7 +82378,7 @@ var directoryToGlob = async (directoryPaths, {
return globs.flat();
};
var directoryToGlobSync = (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => directoryPaths.flatMap((directoryPath) => isDirectorySync(normalizePathForDirectoryGlob(directoryPath, cwd)) ? getDirectoryGlob({ directoryPath, files, extensions }) : directoryPath);
@@ -82571,7 +82436,7 @@ var getFilterSync = (options) => {
var createFilterFunction = (isIgnored) => {
const seen = /* @__PURE__ */ new Set();
return (fastGlobResult) => {
const pathKey = import_node_path3.default.normalize(fastGlobResult.path ?? fastGlobResult);
const pathKey = import_node_path2.default.normalize(fastGlobResult.path ?? fastGlobResult);
if (seen.has(pathKey) || isIgnored && isIgnored(pathKey)) {
return false;
}
@@ -82682,12 +82547,12 @@ var { convertPathToPattern } = import_fast_glob2.default;
var import_is_glob = __toESM(require_is_glob(), 1);
// node_modules/is-path-cwd/index.js
var import_node_process4 = __toESM(require("node:process"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
function isPathCwd(path_) {
let cwd = import_node_process4.default.cwd();
path_ = import_node_path4.default.resolve(path_);
if (import_node_process4.default.platform === "win32") {
let cwd = import_node_process3.default.cwd();
path_ = import_node_path3.default.resolve(path_);
if (import_node_process3.default.platform === "win32") {
cwd = cwd.toLowerCase();
path_ = path_.toLowerCase();
}
@@ -82695,11 +82560,11 @@ function isPathCwd(path_) {
}
// node_modules/del/node_modules/is-path-inside/index.js
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
function isPathInside(childPath, parentPath) {
const relation = import_node_path5.default.relative(parentPath, childPath);
const relation = import_node_path4.default.relative(parentPath, childPath);
return Boolean(
relation && relation !== ".." && !relation.startsWith(`..${import_node_path5.default.sep}`) && relation !== import_node_path5.default.resolve(childPath)
relation && relation !== ".." && !relation.startsWith(`..${import_node_path4.default.sep}`) && relation !== import_node_path4.default.resolve(childPath)
);
}
@@ -82838,14 +82703,14 @@ function safeCheck(file, cwd) {
function normalizePatterns(patterns) {
patterns = Array.isArray(patterns) ? patterns : [patterns];
patterns = patterns.map((pattern) => {
if (import_node_process5.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
if (import_node_process4.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
return slash(pattern);
}
return pattern;
});
return patterns;
}
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5.default.cwd(), onProgress = () => {
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process4.default.cwd(), onProgress = () => {
}, ...options } = {}) {
options = {
expandDirectories: false,
@@ -82866,12 +82731,12 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
let deletedCount = 0;
const mapper = async (file) => {
file = import_node_path6.default.resolve(cwd, file);
file = import_node_path5.default.resolve(cwd, file);
if (!force) {
safeCheck(file, cwd);
}
if (!dryRun) {
await import_promises5.default.rm(file, { recursive: true, force: true });
await import_promises4.default.rm(file, { recursive: true, force: true });
}
deletedCount += 1;
onProgress({
@@ -82888,7 +82753,7 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
// node_modules/get-folder-size/index.js
var import_node_path7 = require("node:path");
var import_node_path6 = require("node:path");
// Public entry point: delegates to the shared `core` walker with
// `errors: true`, which presumably selects the result variant that also
// reports per-item errors — see `core` for the traversal details.
async function getFolderSize(itemPath, options) {
  const result = await core(itemPath, options, { errors: true });
  return result;
}
@@ -82913,7 +82778,7 @@ async function core(rootItemPath, options = {}, returnType = {}) {
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
(directoryItem) => processItem((0, import_node_path7.join)(itemPath, directoryItem))
(directoryItem) => processItem((0, import_node_path6.join)(itemPath, directoryItem))
)
);
}
@@ -85714,16 +85579,14 @@ function getErrorMessage(error2) {
}
async function checkDiskUsage(logger) {
try {
if (process.platform === "darwin" && (process.arch === "arm" || process.arch === "arm64") && !await checkSipEnablement(logger)) {
return void 0;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises4.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE")
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const message = `The Actions runner is running low on disk space (${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = 1024 * 1024 / blockSizeInBytes;
const numBlocksPerGb = 1024 * 1024 * 1024 / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message = `The Actions runner is running low on disk space (${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env["CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */] !== "true") {
logger.warning(message);
} else {
@@ -85732,8 +85595,8 @@ async function checkDiskUsage(logger) {
core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes
};
} catch (error2) {
logger.warning(
@@ -85759,34 +85622,6 @@ function checkActionVersion(version, githubVersion) {
function cloneObject(obj) {
return JSON.parse(JSON.stringify(obj));
}
// Determines whether macOS System Integrity Protection is enabled, caching
// the answer in the CODEQL_ACTION_IS_SIP_ENABLED environment variable (via
// core3.exportVariable) so `csrutil` is only consulted once per job.
// Returns undefined when the status cannot be determined.
async function checkSipEnablement(logger) {
  const cached = process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */];
  if (cached !== void 0 && ["true", "false"].includes(cached)) {
    return cached === "true";
  }
  try {
    const sipStatusOutput = await exec.getExecOutput("csrutil status");
    if (sipStatusOutput.exitCode === 0) {
      const stdout = sipStatusOutput.stdout;
      if (stdout.includes("System Integrity Protection status: enabled.")) {
        core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
        return true;
      }
      if (stdout.includes("System Integrity Protection status: disabled.")) {
        core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
        return false;
      }
    }
  } catch (e) {
    logger.warning(
      `Failed to determine if System Integrity Protection was enabled: ${e}`
    );
    return void 0;
  }
  // Command succeeded (or exited non-zero) without a recognizable status line.
  return void 0;
}
async function cleanUpGlob(glob, name, logger) {
logger.debug(`Cleaning up ${name}.`);
try {
@@ -87040,7 +86875,7 @@ function getCliConfigCategoryIfExists(cliError) {
}
function isUnsupportedPlatform() {
return !SUPPORTED_PLATFORMS.some(
([platform2, arch2]) => platform2 === process.platform && arch2 === process.arch
([platform, arch2]) => platform === process.platform && arch2 === process.arch
);
}
function getUnsupportedPlatformError(cliError) {
@@ -87470,17 +87305,17 @@ function getCodeQLBundleExtension(compressionMethod) {
}
function getCodeQLBundleName(compressionMethod) {
const extension = getCodeQLBundleExtension(compressionMethod);
let platform2;
let platform;
if (process.platform === "win32") {
platform2 = "win64";
platform = "win64";
} else if (process.platform === "linux") {
platform2 = "linux64";
platform = "linux64";
} else if (process.platform === "darwin") {
platform2 = "osx64";
platform = "osx64";
} else {
return `codeql-bundle${extension}`;
}
return `codeql-bundle-${platform2}${extension}`;
return `codeql-bundle-${platform}${extension}`;
}
function getCodeQLActionRepository(logger) {
if (isRunningLocalAction()) {
@@ -87514,12 +87349,12 @@ async function getCodeQLBundleDownloadURL(tagName, apiDetails, compressionMethod
}
const [repositoryOwner, repositoryName] = repository.split("/");
try {
const release3 = await getApiClient().rest.repos.getReleaseByTag({
const release2 = await getApiClient().rest.repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: tagName
});
for (const asset of release3.data.assets) {
for (const asset of release2.data.assets) {
if (asset.name === codeQLBundleName) {
logger.info(
`Found CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} with URL ${asset.url}.`
@@ -87959,14 +87794,14 @@ async function getNightlyToolsUrl(logger) {
zstdAvailability.available
) ? "zstd" : "gzip";
try {
const release3 = await getApiClient().rest.repos.listReleases({
const release2 = await getApiClient().rest.repos.listReleases({
owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER,
repo: CODEQL_NIGHTLIES_REPOSITORY_NAME,
per_page: 1,
page: 1,
prerelease: true
});
const latestRelease = release3.data[0];
const latestRelease = release2.data[0];
if (!latestRelease) {
throw new Error("Could not find the latest nightly release.");
}

View File

@@ -26506,7 +26506,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",

View File

@@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({
var require_mock_client = __commonJS({
"node_modules/undici/lib/mock/mock-client.js"(exports2, module2) {
"use strict";
var { promisify: promisify2 } = require("util");
var { promisify } = require("util");
var Client = require_client();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify2(this[kOriginalClose])();
await promisify(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({
var require_mock_pool = __commonJS({
"node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) {
"use strict";
var { promisify: promisify2 } = require("util");
var { promisify } = require("util");
var Pool = require_pool();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify2(this[kOriginalClose])();
await promisify(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -18148,7 +18148,7 @@ var require_summary = __commonJS({
exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
var os_1 = require("os");
var fs_1 = require("fs");
var { access: access2, appendFile, writeFile } = fs_1.promises;
var { access, appendFile, writeFile } = fs_1.promises;
exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
var Summary = class {
@@ -18171,7 +18171,7 @@ var require_summary = __commonJS({
throw new Error(`Unable to find environment variable for $${exports2.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access2(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
} catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
@@ -20153,8 +20153,8 @@ var require_semver = __commonJS({
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release3, identifier, identifierBase) {
if (release3.startsWith("pre")) {
inc(release2, identifier, identifierBase) {
if (release2.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
@@ -20165,7 +20165,7 @@ var require_semver = __commonJS({
}
}
}
switch (release3) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -20256,7 +20256,7 @@ var require_semver = __commonJS({
break;
}
default:
throw new Error(`invalid increment argument: ${release3}`);
throw new Error(`invalid increment argument: ${release2}`);
}
this.raw = this.format();
if (this.build.length) {
@@ -20322,7 +20322,7 @@ var require_inc = __commonJS({
"node_modules/semver/functions/inc.js"(exports2, module2) {
"use strict";
var SemVer = require_semver();
var inc = (version, release3, options, identifier, identifierBase) => {
var inc = (version, release2, options, identifier, identifierBase) => {
if (typeof options === "string") {
identifierBase = identifier;
identifier = options;
@@ -20332,7 +20332,7 @@ var require_inc = __commonJS({
return new SemVer(
version instanceof SemVer ? version.version : version,
options
).inc(release3, identifier, identifierBase).version;
).inc(release2, identifier, identifierBase).version;
} catch (er) {
return null;
}
@@ -45042,7 +45042,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -49981,8 +49980,8 @@ var require_semver3 = __commonJS({
}
} while (++i2);
};
SemVer.prototype.inc = function(release3, identifier) {
switch (release3) {
SemVer.prototype.inc = function(release2, identifier) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -50058,20 +50057,20 @@ var require_semver3 = __commonJS({
}
break;
default:
throw new Error("invalid increment argument: " + release3);
throw new Error("invalid increment argument: " + release2);
}
this.format();
this.raw = this.version;
return this;
};
exports2.inc = inc;
function inc(version, release3, loose, identifier) {
function inc(version, release2, loose, identifier) {
if (typeof loose === "string") {
identifier = loose;
loose = void 0;
}
try {
return new SemVer(version, loose).inc(release3, identifier).version;
return new SemVer(version, loose).inc(release2, identifier).version;
} catch (er) {
return null;
}
@@ -73553,7 +73552,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
};
var access2 = {
var access = {
parameterPath: ["options", "access"],
mapper: {
serializedName: "x-ms-blob-public-access",
@@ -75361,7 +75360,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
requestId,
accept1,
metadata,
access2,
access,
defaultEncryptionScope,
preventEncryptionScopeOverride
],
@@ -75508,7 +75507,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
accept,
version,
requestId,
access2,
access,
leaseId,
ifModifiedSince,
ifUnmodifiedSince
@@ -84354,7 +84353,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
* @param containerAcl - Array of elements each having a unique Id and details of the access policy.
* @param options - Options to Container Set Access Policy operation.
*/
async setAccessPolicy(access3, containerAcl2, options = {}) {
async setAccessPolicy(access2, containerAcl2, options = {}) {
options.conditions = options.conditions || {};
return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
const acl = [];
@@ -84370,7 +84369,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
return assertResponse(await this.containerContext.setAccessPolicy({
abortSignal: options.abortSignal,
access: access3,
access: access2,
containerAcl: acl,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: options.conditions,
@@ -93311,147 +93310,13 @@ var github = __toESM(require_github());
var io2 = __toESM(require_io());
// src/util.ts
var fsPromises = __toESM(require("fs/promises"));
var core3 = __toESM(require_core());
var exec = __toESM(require_exec());
var io = __toESM(require_io());
// node_modules/check-disk-space/dist/check-disk-space.mjs
var import_node_child_process = require("node:child_process");
var import_promises = require("node:fs/promises");
var import_node_os = require("node:os");
var import_node_path = require("node:path");
var import_node_process = require("node:process");
var import_node_util = require("node:util");
var InvalidPathError = class _InvalidPathError extends Error {
constructor(message) {
super(message);
this.name = "InvalidPathError";
Object.setPrototypeOf(this, _InvalidPathError.prototype);
}
};
var NoMatchError = class _NoMatchError extends Error {
constructor(message) {
super(message);
this.name = "NoMatchError";
Object.setPrototypeOf(this, _NoMatchError.prototype);
}
};
async function isDirectoryExisting(directoryPath, dependencies) {
try {
await dependencies.fsAccess(directoryPath);
return Promise.resolve(true);
} catch (error2) {
return Promise.resolve(false);
}
}
async function getFirstExistingParentPath(directoryPath, dependencies) {
let parentDirectoryPath = directoryPath;
let parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
while (!parentDirectoryFound) {
parentDirectoryPath = dependencies.pathNormalize(parentDirectoryPath + "/..");
parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
}
return parentDirectoryPath;
}
async function hasPowerShell3(dependencies) {
const major = parseInt(dependencies.release.split(".")[0], 10);
if (major <= 6) {
return false;
}
try {
await dependencies.cpExecFile("where", ["powershell"], { windowsHide: true });
return true;
} catch (error2) {
return false;
}
}
function checkDiskSpace(directoryPath, dependencies = {
platform: import_node_process.platform,
release: (0, import_node_os.release)(),
fsAccess: import_promises.access,
pathNormalize: import_node_path.normalize,
pathSep: import_node_path.sep,
cpExecFile: (0, import_node_util.promisify)(import_node_child_process.execFile)
}) {
function mapOutput(stdout, filter, mapping, coefficient) {
const parsed = stdout.split("\n").map((line) => line.trim()).filter((line) => line.length !== 0).slice(1).map((line) => line.split(/\s+(?=[\d/])/));
const filtered = parsed.filter(filter);
if (filtered.length === 0) {
throw new NoMatchError();
}
const diskData = filtered[0];
return {
diskPath: diskData[mapping.diskPath],
free: parseInt(diskData[mapping.free], 10) * coefficient,
size: parseInt(diskData[mapping.size], 10) * coefficient
};
}
async function check(cmd, filter, mapping, coefficient = 1) {
const [file, ...args] = cmd;
if (file === void 0) {
return Promise.reject(new Error("cmd must contain at least one item"));
}
try {
const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
return mapOutput(stdout, filter, mapping, coefficient);
} catch (error2) {
return Promise.reject(error2);
}
}
async function checkWin32(directoryPath2) {
if (directoryPath2.charAt(1) !== ":") {
return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath2}`));
}
const powershellCmd = [
"powershell",
"Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size"
];
const wmicCmd = [
"wmic",
"logicaldisk",
"get",
"size,freespace,caption"
];
const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
return check(cmd, (driveData) => {
const driveLetter = driveData[0];
return directoryPath2.toUpperCase().startsWith(driveLetter.toUpperCase());
}, {
diskPath: 0,
free: 1,
size: 2
});
}
async function checkUnix(directoryPath2) {
if (!dependencies.pathNormalize(directoryPath2).startsWith(dependencies.pathSep)) {
return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath2}`));
}
const pathToCheck = await getFirstExistingParentPath(directoryPath2, dependencies);
return check(
[
"df",
"-Pk",
"--",
pathToCheck
],
() => true,
// We should only get one line, so we did not need to filter
{
diskPath: 5,
free: 3,
size: 1
},
1024
);
}
if (dependencies.platform === "win32") {
return checkWin32(directoryPath);
}
return checkUnix(directoryPath);
}
// node_modules/get-folder-size/index.js
var import_node_path2 = require("node:path");
var import_node_path = require("node:path");
async function getFolderSize(itemPath, options) {
return await core(itemPath, options, { errors: true });
}
@@ -93476,7 +93341,7 @@ async function core(rootItemPath, options = {}, returnType = {}) {
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
(directoryItem) => processItem((0, import_node_path2.join)(itemPath, directoryItem))
(directoryItem) => processItem((0, import_node_path.join)(itemPath, directoryItem))
)
);
}
@@ -96153,16 +96018,14 @@ function getErrorMessage(error2) {
}
async function checkDiskUsage(logger) {
try {
if (process.platform === "darwin" && (process.arch === "arm" || process.arch === "arm64") && !await checkSipEnablement(logger)) {
return void 0;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE")
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const message = `The Actions runner is running low on disk space (${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = 1024 * 1024 / blockSizeInBytes;
const numBlocksPerGb = 1024 * 1024 * 1024 / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message = `The Actions runner is running low on disk space (${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env["CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */] !== "true") {
logger.warning(message);
} else {
@@ -96171,8 +96034,8 @@ async function checkDiskUsage(logger) {
core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes
};
} catch (error2) {
logger.warning(
@@ -96181,34 +96044,6 @@ async function checkDiskUsage(logger) {
return void 0;
}
}
async function checkSipEnablement(logger) {
if (process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] !== void 0 && ["true", "false"].includes(process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */])) {
return process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] === "true";
}
try {
const sipStatusOutput = await exec.getExecOutput("csrutil status");
if (sipStatusOutput.exitCode === 0) {
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: enabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
return true;
}
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: disabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
return false;
}
}
return void 0;
} catch (e) {
logger.warning(
`Failed to determine if System Integrity Protection was enabled: ${e}`
);
return void 0;
}
}
function isDefined(value) {
return value !== void 0 && value !== null;
}
@@ -96494,8 +96329,8 @@ function getCredentials(logger, registrySecrets, registriesCredentials, language
return out;
}
function getProxyPackage() {
const platform2 = process.platform === "win32" ? "win64" : process.platform === "darwin" ? "osx64" : "linux64";
return `${UPDATEJOB_PROXY}-${platform2}.tar.gz`;
const platform = process.platform === "win32" ? "win64" : process.platform === "darwin" ? "osx64" : "linux64";
return `${UPDATEJOB_PROXY}-${platform}.tar.gz`;
}
function getFallbackUrl(proxyPackage) {
return `${UPDATEJOB_PROXY_URL_PREFIX}${proxyPackage}`;

1
lib/upload-lib.js generated
View File

@@ -33652,7 +33652,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",

View File

@@ -26506,7 +26506,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",

View File

@@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({
var require_mock_client = __commonJS({
"node_modules/undici/lib/mock/mock-client.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Client = require_client();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({
var require_mock_pool = __commonJS({
"node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) {
"use strict";
var { promisify: promisify3 } = require("util");
var { promisify: promisify2 } = require("util");
var Pool = require_pool();
var { buildMockDispatch } = require_mock_utils();
var {
@@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({
return new MockInterceptor(opts, this[kDispatches]);
}
async [kClose]() {
await promisify3(this[kOriginalClose])();
await promisify2(this[kOriginalClose])();
this[kConnected] = 0;
this[kMockAgent][Symbols.kClients].delete(this[kOrigin]);
}
@@ -18148,7 +18148,7 @@ var require_summary = __commonJS({
exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
var os_1 = require("os");
var fs_1 = require("fs");
var { access: access2, appendFile, writeFile } = fs_1.promises;
var { access, appendFile, writeFile } = fs_1.promises;
exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
var Summary = class {
@@ -18171,7 +18171,7 @@ var require_summary = __commonJS({
throw new Error(`Unable to find environment variable for $${exports2.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access2(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
} catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
@@ -28577,13 +28577,13 @@ var require_reusify = __commonJS({
current.next = null;
return current;
}
function release3(obj) {
function release2(obj) {
tail.next = obj;
tail = obj;
}
return {
get,
release: release3
release: release2
};
}
module2.exports = reusify;
@@ -28652,7 +28652,7 @@ var require_queue = __commonJS({
self2.paused = false;
for (var i = 0; i < self2.concurrency; i++) {
_running++;
release3();
release2();
}
}
function idle() {
@@ -28661,7 +28661,7 @@ var require_queue = __commonJS({
function push(value, done) {
var current = cache.get();
current.context = context2;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -28681,7 +28681,7 @@ var require_queue = __commonJS({
function unshift(value, done) {
var current = cache.get();
current.context = context2;
current.release = release3;
current.release = release2;
current.value = value;
current.callback = done || noop2;
if (_running === self2.concurrency || self2.paused) {
@@ -28698,7 +28698,7 @@ var require_queue = __commonJS({
worker.call(context2, current.value, current.worked);
}
}
function release3(holder) {
function release2(holder) {
if (holder) {
cache.release(holder);
}
@@ -30719,8 +30719,8 @@ var require_semver = __commonJS({
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
inc(release3, identifier, identifierBase) {
if (release3.startsWith("pre")) {
inc(release2, identifier, identifierBase) {
if (release2.startsWith("pre")) {
if (!identifier && identifierBase === false) {
throw new Error("invalid increment argument: identifier is empty");
}
@@ -30731,7 +30731,7 @@ var require_semver = __commonJS({
}
}
}
switch (release3) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -30822,7 +30822,7 @@ var require_semver = __commonJS({
break;
}
default:
throw new Error(`invalid increment argument: ${release3}`);
throw new Error(`invalid increment argument: ${release2}`);
}
this.raw = this.format();
if (this.build.length) {
@@ -30888,7 +30888,7 @@ var require_inc = __commonJS({
"node_modules/semver/functions/inc.js"(exports2, module2) {
"use strict";
var SemVer = require_semver();
var inc = (version, release3, options, identifier, identifierBase) => {
var inc = (version, release2, options, identifier, identifierBase) => {
if (typeof options === "string") {
identifierBase = identifier;
identifier = options;
@@ -30898,7 +30898,7 @@ var require_inc = __commonJS({
return new SemVer(
version instanceof SemVer ? version.version : version,
options
).inc(release3, identifier, identifierBase).version;
).inc(release2, identifier, identifierBase).version;
} catch (er) {
return null;
}
@@ -32355,7 +32355,6 @@ var require_package = __commonJS({
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
archiver: "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
del: "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -35997,8 +35996,8 @@ var require_semver3 = __commonJS({
}
} while (++i2);
};
SemVer.prototype.inc = function(release3, identifier) {
switch (release3) {
SemVer.prototype.inc = function(release2, identifier) {
switch (release2) {
case "premajor":
this.prerelease.length = 0;
this.patch = 0;
@@ -36074,20 +36073,20 @@ var require_semver3 = __commonJS({
}
break;
default:
throw new Error("invalid increment argument: " + release3);
throw new Error("invalid increment argument: " + release2);
}
this.format();
this.raw = this.version;
return this;
};
exports2.inc = inc;
function inc(version, release3, loose, identifier) {
function inc(version, release2, loose, identifier) {
if (typeof loose === "string") {
identifier = loose;
loose = void 0;
}
try {
return new SemVer(version, loose).inc(release3, identifier).version;
return new SemVer(version, loose).inc(release2, identifier).version;
} catch (er) {
return null;
}
@@ -59569,7 +59568,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
};
var access2 = {
var access = {
parameterPath: ["options", "access"],
mapper: {
serializedName: "x-ms-blob-public-access",
@@ -61377,7 +61376,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
requestId,
accept1,
metadata,
access2,
access,
defaultEncryptionScope,
preventEncryptionScopeOverride
],
@@ -61524,7 +61523,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
accept,
version,
requestId,
access2,
access,
leaseId,
ifModifiedSince,
ifUnmodifiedSince
@@ -70370,7 +70369,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
* @param containerAcl - Array of elements each having a unique Id and details of the access policy.
* @param options - Options to Container Set Access Policy operation.
*/
async setAccessPolicy(access3, containerAcl2, options = {}) {
async setAccessPolicy(access2, containerAcl2, options = {}) {
options.conditions = options.conditions || {};
return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
const acl = [];
@@ -70386,7 +70385,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
return assertResponse(await this.containerContext.setAccessPolicy({
abortSignal: options.abortSignal,
access: access3,
access: access2,
containerAcl: acl,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: options.conditions,
@@ -84858,160 +84857,26 @@ var github = __toESM(require_github());
var io2 = __toESM(require_io());
// src/util.ts
var fsPromises4 = __toESM(require("fs/promises"));
var path5 = __toESM(require("path"));
var core3 = __toESM(require_core());
var exec = __toESM(require_exec());
var io = __toESM(require_io());
// node_modules/check-disk-space/dist/check-disk-space.mjs
var import_node_child_process = require("node:child_process");
var import_promises = require("node:fs/promises");
var import_node_os = require("node:os");
var import_node_path = require("node:path");
var import_node_process = require("node:process");
var import_node_util = require("node:util");
var InvalidPathError = class _InvalidPathError extends Error {
constructor(message) {
super(message);
this.name = "InvalidPathError";
Object.setPrototypeOf(this, _InvalidPathError.prototype);
}
};
var NoMatchError = class _NoMatchError extends Error {
constructor(message) {
super(message);
this.name = "NoMatchError";
Object.setPrototypeOf(this, _NoMatchError.prototype);
}
};
async function isDirectoryExisting(directoryPath, dependencies) {
try {
await dependencies.fsAccess(directoryPath);
return Promise.resolve(true);
} catch (error2) {
return Promise.resolve(false);
}
}
async function getFirstExistingParentPath(directoryPath, dependencies) {
let parentDirectoryPath = directoryPath;
let parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
while (!parentDirectoryFound) {
parentDirectoryPath = dependencies.pathNormalize(parentDirectoryPath + "/..");
parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
}
return parentDirectoryPath;
}
async function hasPowerShell3(dependencies) {
const major = parseInt(dependencies.release.split(".")[0], 10);
if (major <= 6) {
return false;
}
try {
await dependencies.cpExecFile("where", ["powershell"], { windowsHide: true });
return true;
} catch (error2) {
return false;
}
}
function checkDiskSpace(directoryPath, dependencies = {
platform: import_node_process.platform,
release: (0, import_node_os.release)(),
fsAccess: import_promises.access,
pathNormalize: import_node_path.normalize,
pathSep: import_node_path.sep,
cpExecFile: (0, import_node_util.promisify)(import_node_child_process.execFile)
}) {
function mapOutput(stdout, filter, mapping, coefficient) {
const parsed = stdout.split("\n").map((line) => line.trim()).filter((line) => line.length !== 0).slice(1).map((line) => line.split(/\s+(?=[\d/])/));
const filtered = parsed.filter(filter);
if (filtered.length === 0) {
throw new NoMatchError();
}
const diskData = filtered[0];
return {
diskPath: diskData[mapping.diskPath],
free: parseInt(diskData[mapping.free], 10) * coefficient,
size: parseInt(diskData[mapping.size], 10) * coefficient
};
}
async function check(cmd, filter, mapping, coefficient = 1) {
const [file, ...args] = cmd;
if (file === void 0) {
return Promise.reject(new Error("cmd must contain at least one item"));
}
try {
const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
return mapOutput(stdout, filter, mapping, coefficient);
} catch (error2) {
return Promise.reject(error2);
}
}
async function checkWin32(directoryPath2) {
if (directoryPath2.charAt(1) !== ":") {
return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath2}`));
}
const powershellCmd = [
"powershell",
"Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size"
];
const wmicCmd = [
"wmic",
"logicaldisk",
"get",
"size,freespace,caption"
];
const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
return check(cmd, (driveData) => {
const driveLetter = driveData[0];
return directoryPath2.toUpperCase().startsWith(driveLetter.toUpperCase());
}, {
diskPath: 0,
free: 1,
size: 2
});
}
async function checkUnix(directoryPath2) {
if (!dependencies.pathNormalize(directoryPath2).startsWith(dependencies.pathSep)) {
return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath2}`));
}
const pathToCheck = await getFirstExistingParentPath(directoryPath2, dependencies);
return check(
[
"df",
"-Pk",
"--",
pathToCheck
],
() => true,
// We should only get one line, so we did not need to filter
{
diskPath: 5,
free: 3,
size: 1
},
1024
);
}
if (dependencies.platform === "win32") {
return checkWin32(directoryPath);
}
return checkUnix(directoryPath);
}
// node_modules/del/index.js
var import_promises5 = __toESM(require("node:fs/promises"), 1);
var import_node_path6 = __toESM(require("node:path"), 1);
var import_node_process5 = __toESM(require("node:process"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_process4 = __toESM(require("node:process"), 1);
// node_modules/globby/index.js
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_fs3 = __toESM(require("node:fs"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
// node_modules/globby/node_modules/@sindresorhus/merge-streams/index.js
var import_node_events = require("node:events");
var import_node_stream = require("node:stream");
var import_promises2 = require("node:stream/promises");
var import_promises = require("node:stream/promises");
function mergeStreams(streams) {
if (!Array.isArray(streams)) {
throw new TypeError(`Expected an array, got \`${typeof streams}\`.`);
@@ -85086,7 +84951,7 @@ var onMergedStreamFinished = async (passThroughStream, streams) => {
}
};
var onMergedStreamEnd = async (passThroughStream, { signal }) => {
await (0, import_promises2.finished)(passThroughStream, { signal, cleanup: true });
await (0, import_promises.finished)(passThroughStream, { signal, cleanup: true });
};
var onInputStreamsUnpipe = async (passThroughStream, streams, { signal }) => {
for await (const [unpipedStream] of (0, import_node_events.on)(passThroughStream, "unpipe", { signal })) {
@@ -85136,7 +85001,7 @@ var afterMergedStreamFinished = async (onFinished, stream2) => {
};
var onInputStreamEnd = async ({ passThroughStream, stream: stream2, streams, ended, aborted, controller: { signal } }) => {
try {
await (0, import_promises2.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
await (0, import_promises.finished)(stream2, { signal, cleanup: true, readable: true, writable: false });
if (streams.has(stream2)) {
ended.add(stream2);
}
@@ -85190,13 +85055,13 @@ var import_fast_glob2 = __toESM(require_out4(), 1);
// node_modules/path-type/index.js
var import_node_fs = __toESM(require("node:fs"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_promises2 = __toESM(require("node:fs/promises"), 1);
async function isType(fsStatType, statsMethodName, filePath) {
if (typeof filePath !== "string") {
throw new TypeError(`Expected a string, got ${typeof filePath}`);
}
try {
const stats = await import_promises3.default[fsStatType](filePath);
const stats = await import_promises2.default[fsStatType](filePath);
return stats[statsMethodName]();
} catch (error2) {
if (error2.code === "ENOENT") {
@@ -85226,20 +85091,20 @@ var isDirectorySync = isTypeSync.bind(void 0, "statSync", "isDirectory");
var isSymlinkSync = isTypeSync.bind(void 0, "lstatSync", "isSymbolicLink");
// node_modules/unicorn-magic/node.js
var import_node_util2 = require("node:util");
var import_node_child_process2 = require("node:child_process");
var import_node_util = require("node:util");
var import_node_child_process = require("node:child_process");
var import_node_url = require("node:url");
var execFileOriginal = (0, import_node_util2.promisify)(import_node_child_process2.execFile);
var execFileOriginal = (0, import_node_util.promisify)(import_node_child_process.execFile);
function toPath(urlOrPath) {
return urlOrPath instanceof URL ? (0, import_node_url.fileURLToPath)(urlOrPath) : urlOrPath;
}
var TEN_MEGABYTES_IN_BYTES = 10 * 1024 * 1024;
// node_modules/globby/ignore.js
var import_node_process2 = __toESM(require("node:process"), 1);
var import_node_process = __toESM(require("node:process"), 1);
var import_node_fs2 = __toESM(require("node:fs"), 1);
var import_promises4 = __toESM(require("node:fs/promises"), 1);
var import_node_path2 = __toESM(require("node:path"), 1);
var import_promises3 = __toESM(require("node:fs/promises"), 1);
var import_node_path = __toESM(require("node:path"), 1);
var import_fast_glob = __toESM(require_out4(), 1);
var import_ignore = __toESM(require_ignore(), 1);
@@ -85267,16 +85132,16 @@ var ignoreFilesGlobOptions = {
dot: true
};
var GITIGNORE_FILES_PATTERN = "**/.gitignore";
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path2.default.posix.join(base, pattern.slice(1)) : import_node_path2.default.posix.join(base, pattern);
var applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) ? "!" + import_node_path.default.posix.join(base, pattern.slice(1)) : import_node_path.default.posix.join(base, pattern);
var parseIgnoreFile = (file, cwd) => {
const base = slash(import_node_path2.default.relative(cwd, import_node_path2.default.dirname(file.filePath)));
const base = slash(import_node_path.default.relative(cwd, import_node_path.default.dirname(file.filePath)));
return file.content.split(/\r?\n/).filter((line) => line && !line.startsWith("#")).map((pattern) => applyBaseToPattern(pattern, base));
};
var toRelativePath = (fileOrDirectory, cwd) => {
cwd = slash(cwd);
if (import_node_path2.default.isAbsolute(fileOrDirectory)) {
if (import_node_path.default.isAbsolute(fileOrDirectory)) {
if (slash(fileOrDirectory).startsWith(cwd)) {
return import_node_path2.default.relative(cwd, fileOrDirectory);
return import_node_path.default.relative(cwd, fileOrDirectory);
}
throw new Error(`Path ${fileOrDirectory} is not in cwd ${cwd}`);
}
@@ -85292,7 +85157,7 @@ var getIsIgnoredPredicate = (files, cwd) => {
};
};
var normalizeOptions = (options = {}) => ({
cwd: toPath(options.cwd) ?? import_node_process2.default.cwd(),
cwd: toPath(options.cwd) ?? import_node_process.default.cwd(),
suppressErrors: Boolean(options.suppressErrors),
deep: typeof options.deep === "number" ? options.deep : Number.POSITIVE_INFINITY,
ignore: [...options.ignore ?? [], ...defaultIgnoredDirectories]
@@ -85309,7 +85174,7 @@ var isIgnoredByIgnoreFiles = async (patterns, options) => {
const files = await Promise.all(
paths.map(async (filePath) => ({
filePath,
content: await import_promises4.default.readFile(filePath, "utf8")
content: await import_promises3.default.readFile(filePath, "utf8")
}))
);
return getIsIgnoredPredicate(files, cwd);
@@ -85338,14 +85203,14 @@ var assertPatternsInput = (patterns) => {
};
var normalizePathForDirectoryGlob = (filePath, cwd) => {
const path16 = isNegativePattern(filePath) ? filePath.slice(1) : filePath;
return import_node_path3.default.isAbsolute(path16) ? path16 : import_node_path3.default.join(cwd, path16);
return import_node_path2.default.isAbsolute(path16) ? path16 : import_node_path2.default.join(cwd, path16);
};
var getDirectoryGlob = ({ directoryPath, files, extensions }) => {
const extensionGlob = extensions?.length > 0 ? `.${extensions.length > 1 ? `{${extensions.join(",")}}` : extensions[0]}` : "";
return files ? files.map((file) => import_node_path3.default.posix.join(directoryPath, `**/${import_node_path3.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path3.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
return files ? files.map((file) => import_node_path2.default.posix.join(directoryPath, `**/${import_node_path2.default.extname(file) ? file : `${file}${extensionGlob}`}`)) : [import_node_path2.default.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ""}`)];
};
var directoryToGlob = async (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => {
@@ -85355,7 +85220,7 @@ var directoryToGlob = async (directoryPaths, {
return globs.flat();
};
var directoryToGlobSync = (directoryPaths, {
cwd = import_node_process3.default.cwd(),
cwd = import_node_process2.default.cwd(),
files,
extensions
} = {}) => directoryPaths.flatMap((directoryPath) => isDirectorySync(normalizePathForDirectoryGlob(directoryPath, cwd)) ? getDirectoryGlob({ directoryPath, files, extensions }) : directoryPath);
@@ -85413,7 +85278,7 @@ var getFilterSync = (options) => {
var createFilterFunction = (isIgnored) => {
const seen = /* @__PURE__ */ new Set();
return (fastGlobResult) => {
const pathKey = import_node_path3.default.normalize(fastGlobResult.path ?? fastGlobResult);
const pathKey = import_node_path2.default.normalize(fastGlobResult.path ?? fastGlobResult);
if (seen.has(pathKey) || isIgnored && isIgnored(pathKey)) {
return false;
}
@@ -85524,12 +85389,12 @@ var { convertPathToPattern } = import_fast_glob2.default;
var import_is_glob = __toESM(require_is_glob(), 1);
// node_modules/is-path-cwd/index.js
var import_node_process4 = __toESM(require("node:process"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
var import_node_process3 = __toESM(require("node:process"), 1);
var import_node_path3 = __toESM(require("node:path"), 1);
function isPathCwd(path_) {
let cwd = import_node_process4.default.cwd();
path_ = import_node_path4.default.resolve(path_);
if (import_node_process4.default.platform === "win32") {
let cwd = import_node_process3.default.cwd();
path_ = import_node_path3.default.resolve(path_);
if (import_node_process3.default.platform === "win32") {
cwd = cwd.toLowerCase();
path_ = path_.toLowerCase();
}
@@ -85537,11 +85402,11 @@ function isPathCwd(path_) {
}
// node_modules/del/node_modules/is-path-inside/index.js
var import_node_path5 = __toESM(require("node:path"), 1);
var import_node_path4 = __toESM(require("node:path"), 1);
function isPathInside(childPath, parentPath) {
const relation = import_node_path5.default.relative(parentPath, childPath);
const relation = import_node_path4.default.relative(parentPath, childPath);
return Boolean(
relation && relation !== ".." && !relation.startsWith(`..${import_node_path5.default.sep}`) && relation !== import_node_path5.default.resolve(childPath)
relation && relation !== ".." && !relation.startsWith(`..${import_node_path4.default.sep}`) && relation !== import_node_path4.default.resolve(childPath)
);
}
@@ -85680,14 +85545,14 @@ function safeCheck(file, cwd) {
function normalizePatterns(patterns) {
patterns = Array.isArray(patterns) ? patterns : [patterns];
patterns = patterns.map((pattern) => {
if (import_node_process5.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
if (import_node_process4.default.platform === "win32" && (0, import_is_glob.default)(pattern) === false) {
return slash(pattern);
}
return pattern;
});
return patterns;
}
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5.default.cwd(), onProgress = () => {
async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process4.default.cwd(), onProgress = () => {
}, ...options } = {}) {
options = {
expandDirectories: false,
@@ -85708,12 +85573,12 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
let deletedCount = 0;
const mapper = async (file) => {
file = import_node_path6.default.resolve(cwd, file);
file = import_node_path5.default.resolve(cwd, file);
if (!force) {
safeCheck(file, cwd);
}
if (!dryRun) {
await import_promises5.default.rm(file, { recursive: true, force: true });
await import_promises4.default.rm(file, { recursive: true, force: true });
}
deletedCount += 1;
onProgress({
@@ -85730,7 +85595,7 @@ async function deleteAsync(patterns, { force, dryRun, cwd = import_node_process5
}
// node_modules/get-folder-size/index.js
var import_node_path7 = require("node:path");
var import_node_path6 = require("node:path");
async function getFolderSize(itemPath, options) {
return await core(itemPath, options, { errors: true });
}
@@ -85755,7 +85620,7 @@ async function core(rootItemPath, options = {}, returnType = {}) {
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
(directoryItem) => processItem((0, import_node_path7.join)(itemPath, directoryItem))
(directoryItem) => processItem((0, import_node_path6.join)(itemPath, directoryItem))
)
);
}
@@ -88534,16 +88399,14 @@ function getErrorMessage(error2) {
}
async function checkDiskUsage(logger) {
try {
if (process.platform === "darwin" && (process.arch === "arm" || process.arch === "arm64") && !await checkSipEnablement(logger)) {
return void 0;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises4.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE")
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const message = `The Actions runner is running low on disk space (${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = 1024 * 1024 / blockSizeInBytes;
const numBlocksPerGb = 1024 * 1024 * 1024 / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message = `The Actions runner is running low on disk space (${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env["CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */] !== "true") {
logger.warning(message);
} else {
@@ -88552,8 +88415,8 @@ async function checkDiskUsage(logger) {
core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes
};
} catch (error2) {
logger.warning(
@@ -88587,34 +88450,6 @@ function satisfiesGHESVersion(ghesVersion, range, defaultIfInvalid) {
function cloneObject(obj) {
return JSON.parse(JSON.stringify(obj));
}
async function checkSipEnablement(logger) {
if (process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] !== void 0 && ["true", "false"].includes(process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */])) {
return process.env["CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */] === "true";
}
try {
const sipStatusOutput = await exec.getExecOutput("csrutil status");
if (sipStatusOutput.exitCode === 0) {
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: enabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
return true;
}
if (sipStatusOutput.stdout.includes(
"System Integrity Protection status: disabled."
)) {
core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
return false;
}
}
return void 0;
} catch (e) {
logger.warning(
`Failed to determine if System Integrity Protection was enabled: ${e}`
);
return void 0;
}
}
async function cleanUpGlob(glob, name, logger) {
logger.debug(`Cleaning up ${name}.`);
try {
@@ -90288,7 +90123,7 @@ function getCliConfigCategoryIfExists(cliError) {
}
function isUnsupportedPlatform() {
return !SUPPORTED_PLATFORMS.some(
([platform2, arch2]) => platform2 === process.platform && arch2 === process.arch
([platform, arch2]) => platform === process.platform && arch2 === process.arch
);
}
function getUnsupportedPlatformError(cliError) {
@@ -90704,17 +90539,17 @@ function getCodeQLBundleExtension(compressionMethod) {
}
function getCodeQLBundleName(compressionMethod) {
const extension = getCodeQLBundleExtension(compressionMethod);
let platform2;
let platform;
if (process.platform === "win32") {
platform2 = "win64";
platform = "win64";
} else if (process.platform === "linux") {
platform2 = "linux64";
platform = "linux64";
} else if (process.platform === "darwin") {
platform2 = "osx64";
platform = "osx64";
} else {
return `codeql-bundle${extension}`;
}
return `codeql-bundle-${platform2}${extension}`;
return `codeql-bundle-${platform}${extension}`;
}
function getCodeQLActionRepository(logger) {
if (isRunningLocalAction()) {
@@ -90748,12 +90583,12 @@ async function getCodeQLBundleDownloadURL(tagName, apiDetails, compressionMethod
}
const [repositoryOwner, repositoryName] = repository.split("/");
try {
const release3 = await getApiClient().rest.repos.getReleaseByTag({
const release2 = await getApiClient().rest.repos.getReleaseByTag({
owner: repositoryOwner,
repo: repositoryName,
tag: tagName
});
for (const asset of release3.data.assets) {
for (const asset of release2.data.assets) {
if (asset.name === codeQLBundleName) {
logger.info(
`Found CodeQL bundle ${codeQLBundleName} in ${repository} on ${apiURL} with URL ${asset.url}.`
@@ -91193,14 +91028,14 @@ async function getNightlyToolsUrl(logger) {
zstdAvailability.available
) ? "zstd" : "gzip";
try {
const release3 = await getApiClient().rest.repos.listReleases({
const release2 = await getApiClient().rest.repos.listReleases({
owner: CODEQL_NIGHTLIES_REPOSITORY_OWNER,
repo: CODEQL_NIGHTLIES_REPOSITORY_NAME,
per_page: 1,
page: 1,
prerelease: true
});
const latestRelease = release3.data[0];
const latestRelease = release2.data[0];
if (!latestRelease) {
throw new Error("Could not find the latest nightly release.");
}

8
package-lock.json generated
View File

@@ -23,7 +23,6 @@
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
"archiver": "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
"del": "^8.0.0",
"fast-deep-equal": "^3.1.3",
@@ -4286,13 +4285,6 @@
"url": "https://github.com/chalk/chalk?sponsor=1"
}
},
"node_modules/check-disk-space": {
"version": "3.4.0",
"license": "MIT",
"engines": {
"node": ">=16"
}
},
"node_modules/chownr": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",

View File

@@ -38,7 +38,6 @@
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
"archiver": "^7.0.1",
"check-disk-space": "^3.4.0",
"console-log-level": "^1.4.1",
"del": "^8.0.0",
"fast-deep-equal": "^3.1.3",

View File

@@ -534,3 +534,12 @@ test("getCgroupCpuCountFromCpus returns undefined if the CPU file exists but is
);
});
});
test("checkDiskUsage succeeds and produces positive numbers", async (t) => {
process.env["GITHUB_WORKSPACE"] = os.tmpdir();
const diskUsage = await util.checkDiskUsage(getRunnerLogger(true));
if (t.truthy(diskUsage)) {
t.true(diskUsage.numAvailableBytes > 0);
t.true(diskUsage.numTotalBytes > 0);
}
});

View File

@@ -1,11 +1,11 @@
import * as fs from "fs";
import * as fsPromises from "fs/promises";
import * as os from "os";
import * as path from "path";
import * as core from "@actions/core";
import * as exec from "@actions/exec/lib/exec";
import * as io from "@actions/io";
import checkDiskSpace from "check-disk-space";
import * as del from "del";
import getFolderSize from "get-folder-size";
import * as yaml from "js-yaml";
@@ -1099,24 +1099,17 @@ export async function checkDiskUsage(
logger: Logger,
): Promise<DiskUsage | undefined> {
try {
// We avoid running the `df` binary under the hood for macOS ARM runners with SIP disabled.
if (
process.platform === "darwin" &&
(process.arch === "arm" || process.arch === "arm64") &&
!(await checkSipEnablement(logger))
) {
return undefined;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE"),
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = (1024 * 1024) / blockSizeInBytes;
const numBlocksPerGb = (1024 * 1024 * 1024) / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message =
"The Actions runner is running low on disk space " +
`(${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
`(${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env[EnvVar.HAS_WARNED_ABOUT_DISK_SPACE] !== "true") {
logger.warning(message);
} else {
@@ -1125,8 +1118,8 @@ export async function checkDiskUsage(
core.exportVariable(EnvVar.HAS_WARNED_ABOUT_DISK_SPACE, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size,
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes,
};
} catch (error) {
logger.warning(