diff --git a/eslint.config.mjs b/eslint.config.mjs index a27a005fe..6bd669df8 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -76,7 +76,7 @@ export default [ typescript: {}, }, - "import/ignore": ["sinon", "uuid", "@octokit/plugin-retry"], + "import/ignore": ["sinon", "uuid", "@octokit/plugin-retry", "get-folder-size"], }, rules: { diff --git a/lib/analyze-action-post.js b/lib/analyze-action-post.js index fa389abdf..34bb97bb1 100644 --- a/lib/analyze-action-post.js +++ b/lib/analyze-action-post.js @@ -34336,127 +34336,6 @@ var require_del = __commonJS({ } }); -// node_modules/tiny-each-async/index.js -var require_tiny_each_async = __commonJS({ - "node_modules/tiny-each-async/index.js"(exports2, module2) { - "use strict"; - module2.exports = function eachAsync(arr, parallelLimit, iteratorFn, cb) { - var pending = 0; - var index = 0; - var lastIndex = arr.length - 1; - var called = false; - var limit; - var callback; - var iterate; - if (typeof parallelLimit === "number") { - limit = parallelLimit; - iterate = iteratorFn; - callback = cb || function noop() { - }; - } else { - iterate = parallelLimit; - callback = iteratorFn || function noop() { - }; - limit = arr.length; - } - if (!arr.length) { - return callback(); - } - var iteratorLength = iterate.length; - var shouldCallNextIterator = function shouldCallNextIterator2() { - return !called && pending < limit && index < lastIndex; - }; - var iteratorCallback = function iteratorCallback2(err) { - if (called) { - return; - } - pending--; - if (err || index === lastIndex && !pending) { - called = true; - callback(err); - } else if (shouldCallNextIterator()) { - processIterator(++index); - } - }; - var processIterator = function processIterator2() { - pending++; - var args = iteratorLength === 2 ? [arr[index], iteratorCallback] : [arr[index], index, iteratorCallback]; - iterate.apply(null, args); - if (shouldCallNextIterator()) { - processIterator2(++index); - } - }; - processIterator(); - }; - } -}); - -// node_modules/get-folder-size/index.js -var require_get_folder_size = __commonJS({ - "node_modules/get-folder-size/index.js"(exports2, module2) { - "use strict"; - var fs7 = require("fs"); - var path6 = require("path"); - var eachAsync = require_tiny_each_async(); - function readSizeRecursive(seen, item, ignoreRegEx, callback) { - let cb; - let ignoreRegExp; - if (!callback) { - cb = ignoreRegEx; - ignoreRegExp = null; - } else { - cb = callback; - ignoreRegExp = ignoreRegEx; - } - fs7.lstat(item, function lstat(e, stats) { - let total = !e ? 
stats.size || 0 : 0; - if (stats) { - if (seen.has(stats.ino)) { - return cb(null, 0); - } - seen.add(stats.ino); - } - if (!e && stats.isDirectory()) { - fs7.readdir(item, (err, list) => { - if (err) { - return cb(err); - } - eachAsync( - list, - 5e3, - (dirItem, next) => { - readSizeRecursive( - seen, - path6.join(item, dirItem), - ignoreRegExp, - (error2, size) => { - if (!error2) { - total += size; - } - next(error2); - } - ); - }, - (finalErr) => { - cb(finalErr, total); - } - ); - }); - } else { - if (ignoreRegExp && ignoreRegExp.test(item)) { - total = 0; - } - cb(e, total); - } - }); - } - module2.exports = (...args) => { - args.unshift(/* @__PURE__ */ new Set()); - return readSizeRecursive(...args); - }; - } -}); - // node_modules/semver/internal/constants.js var require_constants9 = __commonJS({ "node_modules/semver/internal/constants.js"(exports2, module2) { @@ -36412,7 +36291,7 @@ var require_package = __commonJS({ "fast-deep-equal": "^3.1.3", "file-url": "^3.0.0", "follow-redirects": "^1.15.11", - "get-folder-size": "^2.0.1", + "get-folder-size": "^5.0.0", "js-yaml": "^4.1.0", jsonschema: "1.4.1", long: "^5.3.2", @@ -38076,7 +37955,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -38086,15 +37965,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core13.debug(`implicitDescendants '${result.implicitDescendants}'`); + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -38758,7 +38637,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs7 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path6 = __importStar4(require("path")); @@ -38809,7 +38688,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core13.debug(`Search path '${searchPath}'`); + core14.debug(`Search path '${searchPath}'`); try { yield __await4(fs7.promises.lstat(searchPath)); } catch (err) { @@ -38881,7 +38760,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core13.debug(`Broken symlink '${item.path}'`); + core14.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -38897,7 +38776,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -40221,7 +40100,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob2 = __importStar4(require_glob2()); var io6 = __importStar4(require_io()); @@ -40274,7 +40153,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path6.relative(workspace, file).replace(new RegExp(`\\${path6.sep}`, "g"), "/"); - core13.debug(`Matched: ${relativeFile}`); + core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -40304,7 +40183,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core13.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -40315,10 +40194,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core13.debug(err.message); + core14.debug(err.message); } versionOutput = versionOutput.trim(); - core13.debug(versionOutput); + core14.debug(versionOutput); return versionOutput; }); } @@ -40326,7 +40205,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core13.debug(`zstd version: ${version}`); + core14.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -75638,7 +75517,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -75680,7 +75559,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -75735,14 +75614,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core13.debug(`BlobClient: 
${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core13.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -75813,7 +75692,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -75874,9 +75753,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core13.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core13.debug(`${name} - Error is not retryable`); + core14.debug(`${name} - Error is not retryable`); break; } yield sleep(delay); @@ -75981,7 +75860,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -76019,7 +75898,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core13.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -76053,7 +75932,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -76103,7 +75982,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core13.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -76114,7 +75993,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core13.debug("Unable to validate download, no Content-Length header"); + core14.debug("Unable to validate download, no Content-Length header"); } }); } @@ -76234,7 +76113,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core13.debug("Unable to determine content length, downloading file with http-client..."); + core14.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -76314,7 +76193,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -76334,9 +76213,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core13.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -76373,12 +76252,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Download concurrency: ${result.downloadConcurrency}`); - core13.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core13.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core13.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core13.debug(`Lookup only: ${result.lookupOnly}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Download concurrency: ${result.downloadConcurrency}`); + core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core14.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -76558,7 +76437,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs7 = __importStar4(require("fs")); @@ -76576,7 +76455,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core13.debug(`Resource Url: ${url}`); + core14.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -76604,7 +76483,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core13.isDebug()) { + if (core14.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -76617,9 +76496,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core13.setSecret(cacheDownloadUrl); - core13.debug(`Cache Result:`); - core13.debug(JSON.stringify(cacheResult)); + core14.setSecret(cacheDownloadUrl); + core14.debug(`Cache Result:`); + core14.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -76634,10 +76513,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core13.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core13.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -76682,7 +76561,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core13.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -76704,7 +76583,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core13.debug("Awaiting all uploads"); + core14.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -76747,16 +76626,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core13.debug("Upload cache"); + core14.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core13.debug("Commiting cache"); + core14.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core13.info("Cache saved successfully"); + core14.info("Cache saved successfully"); } }); } @@ -82182,7 +82061,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var path6 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -82235,7 +82114,7 @@ var require_cache3 = __commonJS({ function restoreCache4(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -82251,8 +82130,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -82270,19 +82149,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path6.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -82290,16 +82169,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error2.message}`); + core14.error(`Failed to restore: ${error2.message}`); } else { - core13.warning(`Failed to restore: ${error2.message}`); + core14.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -82310,8 +82189,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -82329,30 +82208,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core13.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core14.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core13.info(`Cache hit for restore-key: ${response.matchedKey}`); + core14.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core13.info(`Cache hit for: ${response.matchedKey}`); + core14.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path6.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive path: ${archivePath}`); - core13.debug(`Starting download of archive to: ${archivePath}`); + core14.debug(`Archive path: ${archivePath}`); + core14.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core13.isDebug()) { + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -82360,9 +82239,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error2.message}`); + core14.error(`Failed to restore: ${error2.message}`); } else { - core13.warning(`Failed to restore: ${error2.message}`); + core14.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -82371,7 +82250,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -82380,7 +82259,7 @@ var require_cache3 = __commonJS({ function saveCache4(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -82399,26 +82278,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path6.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -82431,26 +82310,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core13.debug(`Saving Cache (ID: ${cacheId})`); + core14.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core13.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -82463,26 +82342,26 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path6.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } options.archiveSizeBytes = archiveFileSize; - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -82496,10 +82375,10 @@ var require_cache3 = __commonJS({ } signedUploadUrl = response.signedUploadUrl; } catch (error2) { - core13.debug(`Failed to reserve 
cache: ${error2}`); + core14.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core13.debug(`Attempting to upload cache located at: ${archivePath}`); + core14.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -82507,7 +82386,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core13.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); } @@ -82517,19 +82396,19 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core13.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -82737,7 +82616,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -82760,10 +82639,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core13.info(err.message); + core14.info(err.message); } const seconds = this.getSleepAmount(); - core13.info(`Waiting ${seconds} seconds before trying again`); + core14.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -82843,7 +82722,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var io6 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); var fs7 = __importStar4(require("fs")); @@ -82872,8 +82751,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || path6.join(_getTempDirectory(), crypto.randomUUID()); yield io6.mkdirP(path6.dirname(dest)); - core13.debug(`Downloading ${url}`); - core13.debug(`Destination ${dest}`); + 
core14.debug(`Downloading ${url}`); + core14.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -82900,7 +82779,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core13.debug("set auth"); + core14.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -82909,7 +82788,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError(response.message.statusCode); - core13.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core14.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream.pipeline); @@ -82918,16 +82797,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs7.createWriteStream(dest)); - core13.debug("download complete"); + core14.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core13.debug("download failed"); + core14.debug("download failed"); try { yield io6.rmRF(dest); } catch (err) { - core13.debug(`Failed to delete '${dest}'. ${err.message}`); + core14.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -82942,7 +82821,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core13.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core14.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -82992,7 +82871,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core13.debug("Checking tar --version"); + core14.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -83002,7 +82881,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core13.debug(versionOutput.trim()); + core14.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -83010,7 +82889,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core13.isDebug() && !flags.includes("v")) { + if (core14.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -83042,7 +82921,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core13.isDebug()) { + if (core14.isDebug()) { args.push("-v"); } const xarPath = yield io6.which("xar", true); @@ -83087,7 +82966,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core13.debug(`Using pwsh at path: ${pwshPath}`); + core14.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -83107,7 +82986,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io6.which("powershell", true); - core13.debug(`Using powershell at path: ${powershellPath}`); + core14.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -83116,7 +82995,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 
0, function* () { const unzipPath = yield io6.which("unzip", true); const args = [file]; - if (!core13.isDebug()) { + if (!core14.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -83127,8 +83006,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch = arch || os.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch}`); - core13.debug(`source dir: ${sourceDir}`); + core14.debug(`Caching tool ${tool} ${version} ${arch}`); + core14.debug(`source dir: ${sourceDir}`); if (!fs7.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -83146,14 +83025,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch = arch || os.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch}`); - core13.debug(`source file: ${sourceFile}`); + core14.debug(`Caching tool ${tool} ${version} ${arch}`); + core14.debug(`source file: ${sourceFile}`); if (!fs7.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch); const destPath = path6.join(destFolder, targetFile); - core13.debug(`destination file ${destPath}`); + core14.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch); return destFolder; @@ -83177,12 +83056,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path6.join(_getCacheDirectory(), toolName, versionSpec, arch); - core13.debug(`checking cache: ${cachePath}`); + core14.debug(`checking cache: ${cachePath}`); if (fs7.existsSync(cachePath) && fs7.existsSync(`${cachePath}.complete`)) { - core13.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); + core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); toolPath = cachePath; } else { - core13.debug("not found"); + core14.debug("not found"); } } return toolPath; @@ -83213,7 +83092,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core13.debug("set auth"); + core14.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -83234,7 +83113,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core13.debug("Invalid json"); + core14.debug("Invalid json"); } } return releases; @@ -83260,7 +83139,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path6.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); - core13.debug(`destination ${folderPath}`); + core14.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); yield io6.rmRF(markerPath); @@ -83272,19 +83151,19 @@ var require_tool_cache = __commonJS({ const folderPath = path6.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); const markerPath = `${folderPath}.complete`; fs7.writeFileSync(markerPath, ""); - core13.debug("finished caching tool"); + core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - core13.debug(`isExplicit: ${c}`); + core14.debug(`isExplicit: 
${c}`); const valid3 = semver8.valid(c) != null; - core13.debug(`explicit? ${valid3}`); + core14.debug(`explicit? ${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core13.debug(`evaluating ${versions.length} versions`); + core14.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver8.gt(a, b)) { return 1; @@ -83300,9 +83179,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core13.debug(`matched: ${version}`); + core14.debug(`matched: ${version}`); } else { - core13.debug("match not found"); + core14.debug("match not found"); } return version; } @@ -85976,14 +85855,14 @@ var require_retention = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getExpiration = void 0; var generated_1 = require_generated(); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getExpiration(retentionDays) { if (!retentionDays) { return void 0; } const maxRetentionDays = getRetentionDays(); if (maxRetentionDays && maxRetentionDays < retentionDays) { - core13.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. Using ${maxRetentionDays} instead.`); + core14.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. Using ${maxRetentionDays} instead.`); retentionDays = maxRetentionDays; } const expirationDate = /* @__PURE__ */ new Date(); @@ -86575,7 +86454,7 @@ var require_util11 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getBackendIdsFromToken = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var config_1 = require_config2(); var jwt_decode_1 = __importDefault4(require_jwt_decode_cjs()); var InvalidJwtError = new Error("Failed to get backend IDs: The provided JWT token is invalid and/or missing claims"); @@ -86601,8 +86480,8 @@ var require_util11 = __commonJS({ workflowRunBackendId: scopeParts[1], workflowJobRunBackendId: scopeParts[2] }; - core13.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`); - core13.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`); + core14.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`); + core14.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`); return ids; } throw InvalidJwtError; @@ -86673,7 +86552,7 @@ var require_blob_upload = __commonJS({ exports2.uploadZipToBlobStorage = void 0; var storage_blob_1 = require_dist7(); var config_1 = require_config2(); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var crypto = __importStar4(require("crypto")); var stream = __importStar4(require("stream")); var errors_1 = require_errors3(); @@ -86699,9 +86578,9 @@ var require_blob_upload = __commonJS({ const bufferSize = (0, config_1.getUploadChunkSize)(); const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL); const blockBlobClient = blobClient.getBlockBlobClient(); - core13.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`); + core14.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`); const uploadCallback = (progress) => { - core13.info(`Uploaded bytes ${progress.loadedBytes}`); + core14.info(`Uploaded bytes 
${progress.loadedBytes}`); uploadByteCount = progress.loadedBytes; lastProgressTime = Date.now(); }; @@ -86715,7 +86594,7 @@ var require_blob_upload = __commonJS({ const hashStream = crypto.createHash("sha256"); zipUploadStream.pipe(uploadStream); zipUploadStream.pipe(hashStream).setEncoding("hex"); - core13.info("Beginning upload of artifact content to blob storage"); + core14.info("Beginning upload of artifact content to blob storage"); try { yield Promise.race([ blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options), @@ -86729,12 +86608,12 @@ var require_blob_upload = __commonJS({ } finally { abortController.abort(); } - core13.info("Finished uploading artifact content to blob storage!"); + core14.info("Finished uploading artifact content to blob storage!"); hashStream.end(); sha256Hash = hashStream.read(); - core13.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`); + core14.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`); if (uploadByteCount === 0) { - core13.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`); + core14.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`); } return { uploadSize: uploadByteCount, @@ -89790,7 +89669,7 @@ var require_BufferList = __commonJS({ this.head = this.tail = null; this.length = 0; }; - BufferList.prototype.join = function join6(s) { + BufferList.prototype.join = function join7(s) { if (this.length === 0) return ""; var p = this.head; var ret = "" + p.data; @@ -110793,7 +110672,7 @@ var require_zip2 = __commonJS({ var stream = __importStar4(require("stream")); var promises_1 = require("fs/promises"); var archiver2 = __importStar4(require_archiver()); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var config_1 = require_config2(); exports2.DEFAULT_COMPRESSION_LEVEL = 6; var ZipUploadStream = class extends stream.Transform { @@ -110810,7 +110689,7 @@ var require_zip2 = __commonJS({ exports2.ZipUploadStream = ZipUploadStream; function createZipUploadStream(uploadSpecification, compressionLevel = exports2.DEFAULT_COMPRESSION_LEVEL) { return __awaiter4(this, void 0, void 0, function* () { - core13.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`); + core14.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`); const zip = archiver2.create("zip", { highWaterMark: (0, config_1.getUploadChunkSize)(), zlib: { level: compressionLevel } @@ -110834,8 +110713,8 @@ var require_zip2 = __commonJS({ } const bufferSize = (0, config_1.getUploadChunkSize)(); const zipUploadStream = new ZipUploadStream(bufferSize); - core13.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`); - core13.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`); + core14.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`); + core14.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`); zip.pipe(zipUploadStream); zip.finalize(); return zipUploadStream; @@ -110843,24 +110722,24 @@ var require_zip2 = __commonJS({ } exports2.createZipUploadStream = createZipUploadStream; var zipErrorCallback = (error2) => { - core13.error("An error has occurred while creating the zip file for upload"); - core13.info(error2); + core14.error("An error has occurred while creating the zip file for upload"); + core14.info(error2); throw new Error("An error has occurred during zip creation for the 
artifact"); }; var zipWarningCallback = (error2) => { if (error2.code === "ENOENT") { - core13.warning("ENOENT warning during artifact zip creation. No such file or directory"); - core13.info(error2); + core14.warning("ENOENT warning during artifact zip creation. No such file or directory"); + core14.info(error2); } else { - core13.warning(`A non-blocking warning has occurred during artifact zip creation: ${error2.code}`); - core13.info(error2); + core14.warning(`A non-blocking warning has occurred during artifact zip creation: ${error2.code}`); + core14.info(error2); } }; var zipFinishCallback = () => { - core13.debug("Zip stream for upload has finished."); + core14.debug("Zip stream for upload has finished."); }; var zipEndCallback = () => { - core13.debug("Zip stream for upload has ended."); + core14.debug("Zip stream for upload has ended."); }; } }); @@ -110925,7 +110804,7 @@ var require_upload_artifact = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadArtifact = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var retention_1 = require_retention(); var path_and_artifact_name_validation_1 = require_path_and_artifact_name_validation(); var artifact_twirp_client_1 = require_artifact_twirp_client2(); @@ -110972,13 +110851,13 @@ var require_upload_artifact = __commonJS({ value: `sha256:${uploadResult.sha256Hash}` }); } - core13.info(`Finalizing artifact upload`); + core14.info(`Finalizing artifact upload`); const finalizeArtifactResp = yield artifactClient.FinalizeArtifact(finalizeArtifactReq); if (!finalizeArtifactResp.ok) { throw new errors_1.InvalidResponseError("FinalizeArtifact: response from backend was not ok"); } const artifactId = BigInt(finalizeArtifactResp.artifactId); - core13.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`); + core14.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`); return { size: uploadResult.uploadSize, digest: uploadResult.sha256Hash, @@ -118126,7 +118005,7 @@ var require_download_artifact = __commonJS({ var crypto = __importStar4(require("crypto")); var stream = __importStar4(require("stream")); var github2 = __importStar4(require_github2()); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var httpClient = __importStar4(require_lib()); var unzip_stream_1 = __importDefault4(require_unzip()); var user_agent_1 = require_user_agent2(); @@ -118162,7 +118041,7 @@ var require_download_artifact = __commonJS({ return yield streamExtractExternal(url, directory); } catch (error2) { retryCount++; - core13.debug(`Failed to download artifact after ${retryCount} retries due to ${error2.message}. Retrying in 5 seconds...`); + core14.debug(`Failed to download artifact after ${retryCount} retries due to ${error2.message}. 
Retrying in 5 seconds...`); yield new Promise((resolve5) => setTimeout(resolve5, 5e3)); } } @@ -118191,7 +118070,7 @@ var require_download_artifact = __commonJS({ extractStream.on("data", () => { timer.refresh(); }).on("error", (error2) => { - core13.debug(`response.message: Artifact download failed: ${error2.message}`); + core14.debug(`response.message: Artifact download failed: ${error2.message}`); clearTimeout(timer); reject(error2); }).pipe(unzip_stream_1.default.Extract({ path: directory })).on("close", () => { @@ -118199,7 +118078,7 @@ var require_download_artifact = __commonJS({ if (hashStream) { hashStream.end(); sha256Digest = hashStream.read(); - core13.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`); + core14.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`); } resolve5({ sha256Digest: `sha256:${sha256Digest}` }); }).on("error", (error2) => { @@ -118214,7 +118093,7 @@ var require_download_artifact = __commonJS({ const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path); const api = github2.getOctokit(token); let digestMismatch = false; - core13.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`); + core14.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`); const { headers, status } = yield api.rest.actions.downloadArtifact({ owner: repositoryOwner, repo: repositoryName, @@ -118231,16 +118110,16 @@ var require_download_artifact = __commonJS({ if (!location) { throw new Error(`Unable to redirect to artifact download url`); } - core13.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`); + core14.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`); try { - core13.info(`Starting download of artifact to: ${downloadPath}`); + core14.info(`Starting download of artifact to: ${downloadPath}`); const extractResponse = yield streamExtract(location, downloadPath); - core13.info(`Artifact download completed successfully.`); + core14.info(`Artifact download completed successfully.`); if (options === null || options === void 0 ? void 0 : options.expectedHash) { if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) { digestMismatch = true; - core13.debug(`Computed digest: ${extractResponse.sha256Digest}`); - core13.debug(`Expected digest: ${options.expectedHash}`); + core14.debug(`Computed digest: ${extractResponse.sha256Digest}`); + core14.debug(`Expected digest: ${options.expectedHash}`); } } } catch (error2) { @@ -118267,7 +118146,7 @@ var require_download_artifact = __commonJS({ Are you trying to download from a different run? Try specifying a github-token with \`actions:read\` scope.`); } if (artifacts.length > 1) { - core13.warning("Multiple artifacts found, defaulting to first."); + core14.warning("Multiple artifacts found, defaulting to first."); } const signedReq = { workflowRunBackendId: artifacts[0].workflowRunBackendId, @@ -118275,16 +118154,16 @@ Are you trying to download from a different run? 
Try specifying a github-token w name: artifacts[0].name }; const { signedUrl } = yield artifactClient.GetSignedArtifactURL(signedReq); - core13.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`); + core14.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`); try { - core13.info(`Starting download of artifact to: ${downloadPath}`); + core14.info(`Starting download of artifact to: ${downloadPath}`); const extractResponse = yield streamExtract(signedUrl, downloadPath); - core13.info(`Artifact download completed successfully.`); + core14.info(`Artifact download completed successfully.`); if (options === null || options === void 0 ? void 0 : options.expectedHash) { if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) { digestMismatch = true; - core13.debug(`Computed digest: ${extractResponse.sha256Digest}`); - core13.debug(`Expected digest: ${options.expectedHash}`); + core14.debug(`Computed digest: ${extractResponse.sha256Digest}`); + core14.debug(`Expected digest: ${options.expectedHash}`); } } } catch (error2) { @@ -118297,10 +118176,10 @@ Are you trying to download from a different run? Try specifying a github-token w function resolveOrCreateDirectory(downloadPath = (0, config_1.getGitHubWorkspaceDir)()) { return __awaiter4(this, void 0, void 0, function* () { if (!(yield exists(downloadPath))) { - core13.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`); + core14.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`); yield promises_1.default.mkdir(downloadPath, { recursive: true }); } else { - core13.debug(`Artifact destination folder already exists: ${downloadPath}`); + core14.debug(`Artifact destination folder already exists: ${downloadPath}`); } return downloadPath; }); @@ -118341,7 +118220,7 @@ var require_retry_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRetryOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var defaultMaxRetryNumber = 5; var defaultExemptStatusCodes = [400, 401, 403, 404, 422]; function getRetryOptions(defaultOptions, retries = defaultMaxRetryNumber, exemptStatusCodes = defaultExemptStatusCodes) { @@ -118356,7 +118235,7 @@ var require_retry_options = __commonJS({ retryOptions.doNotRetry = exemptStatusCodes; } const requestOptions = Object.assign(Object.assign({}, defaultOptions.request), { retries }); - core13.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? _a : "octokit default: [400, 401, 403, 404, 422]"})`); + core14.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? 
_a : "octokit default: [400, 401, 403, 404, 422]"})`); return [retryOptions, requestOptions]; } exports2.getRetryOptions = getRetryOptions; @@ -118513,7 +118392,7 @@ var require_get_artifact = __commonJS({ exports2.getArtifactInternal = exports2.getArtifactPublic = void 0; var github_1 = require_github2(); var plugin_retry_1 = require_dist_node25(); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var utils_1 = require_utils13(); var retry_options_1 = require_retry_options(); var plugin_request_log_1 = require_dist_node24(); @@ -118551,7 +118430,7 @@ var require_get_artifact = __commonJS({ let artifact2 = getArtifactResp.data.artifacts[0]; if (getArtifactResp.data.artifacts.length > 1) { artifact2 = getArtifactResp.data.artifacts.sort((a, b) => b.id - a.id)[0]; - core13.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`); + core14.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`); } return { artifact: { @@ -118584,7 +118463,7 @@ var require_get_artifact = __commonJS({ let artifact2 = res.artifacts[0]; if (res.artifacts.length > 1) { artifact2 = res.artifacts.sort((a, b) => Number(b.databaseId) - Number(a.databaseId))[0]; - core13.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.databaseId})`); + core14.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.databaseId})`); } return { artifact: { @@ -120532,7 +120411,7 @@ var require_requestUtils2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientRequest = exports2.retry = void 0; var utils_1 = require_utils14(); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var config_variables_1 = require_config_variables(); function retry3(name, operation, customErrorMessages, maxAttempts) { return __awaiter4(this, void 0, void 0, function* () { @@ -120559,13 +120438,13 @@ var require_requestUtils2 = __commonJS({ errorMessage = error2.message; } if (!isRetryable) { - core13.info(`${name} - Error is not retryable`); + core14.info(`${name} - Error is not retryable`); if (response) { (0, utils_1.displayHttpDiagnostics)(response); } break; } - core13.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core14.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); yield (0, utils_1.sleep)((0, utils_1.getExponentialRetryTimeInMilliseconds)(attempt)); attempt++; } @@ -120649,7 +120528,7 @@ var require_upload_http_client = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadHttpClient = void 0; var fs7 = __importStar4(require("fs")); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var tmp = __importStar4(require_tmp_promise()); var stream = __importStar4(require("stream")); var utils_1 = require_utils14(); @@ -120714,7 +120593,7 @@ var require_upload_http_client = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const FILE_CONCURRENCY = (0, config_variables_1.getUploadFileConcurrency)(); const MAX_CHUNK_SIZE = (0, config_variables_1.getUploadChunkSize)(); - core13.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); + core14.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); const parameters = []; let continueOnError = true; 
if (options) { @@ -120751,15 +120630,15 @@ var require_upload_http_client = __commonJS({ } const startTime = perf_hooks_1.performance.now(); const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters); - if (core13.isDebug()) { - core13.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); + if (core14.isDebug()) { + core14.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); } uploadFileSize += uploadFileResult.successfulUploadSize; totalFileSize += uploadFileResult.totalSize; if (uploadFileResult.isSuccess === false) { failedItemsToReport.push(currentFileParameters.file); if (!continueOnError) { - core13.error(`aborting artifact upload`); + core14.error(`aborting artifact upload`); abortPendingFileUploads = true; } } @@ -120768,7 +120647,7 @@ var require_upload_http_client = __commonJS({ }))); this.statusReporter.stop(); this.uploadHttpManager.disposeAndReplaceAllClients(); - core13.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); + core14.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); return { uploadSize: uploadFileSize, totalSize: totalFileSize, @@ -120794,16 +120673,16 @@ var require_upload_http_client = __commonJS({ let uploadFileSize = 0; let isGzip = true; if (!isFIFO && totalFileSize < 65536) { - core13.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); + core14.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); const buffer = yield (0, upload_gzip_1.createGZipFileInBuffer)(parameters.file); let openUploadStream; if (totalFileSize < buffer.byteLength) { - core13.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + core14.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); openUploadStream = () => fs7.createReadStream(parameters.file); isGzip = false; uploadFileSize = totalFileSize; } else { - core13.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); + core14.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); openUploadStream = () => { const passThrough = new stream.PassThrough(); passThrough.end(buffer); @@ -120815,7 +120694,7 @@ var require_upload_http_client = __commonJS({ if (!result) { isUploadSuccessful = false; failedChunkSizes += uploadFileSize; - core13.warning(`Aborting upload for ${parameters.file} due to failure`); + core14.warning(`Aborting upload for ${parameters.file} due to failure`); } return { isSuccess: isUploadSuccessful, @@ -120824,16 +120703,16 @@ var require_upload_http_client = __commonJS({ }; } else { const tempFile = yield tmp.file(); - core13.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); + core14.debug(`${parameters.file} is greater than 64k in size. 
Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); uploadFileSize = yield (0, upload_gzip_1.createGZipFileOnDisk)(parameters.file, tempFile.path); let uploadFilePath = tempFile.path; if (!isFIFO && totalFileSize < uploadFileSize) { - core13.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + core14.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); uploadFileSize = totalFileSize; uploadFilePath = parameters.file; isGzip = false; } else { - core13.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); + core14.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); } let abortFileUpload = false; while (offset < uploadFileSize) { @@ -120853,7 +120732,7 @@ var require_upload_http_client = __commonJS({ if (!result) { isUploadSuccessful = false; failedChunkSizes += chunkSize; - core13.warning(`Aborting upload for ${parameters.file} due to failure`); + core14.warning(`Aborting upload for ${parameters.file} due to failure`); abortFileUpload = true; } else { if (uploadFileSize > 8388608) { @@ -120861,7 +120740,7 @@ var require_upload_http_client = __commonJS({ } } } - core13.debug(`deleting temporary gzip file ${tempFile.path}`); + core14.debug(`deleting temporary gzip file ${tempFile.path}`); yield tempFile.cleanup(); return { isSuccess: isUploadSuccessful, @@ -120900,7 +120779,7 @@ var require_upload_http_client = __commonJS({ if (response) { (0, utils_1.displayHttpDiagnostics)(response); } - core13.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); + core14.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); return true; } return false; @@ -120908,14 +120787,14 @@ var require_upload_http_client = __commonJS({ const backOff = (retryAfterValue) => __awaiter4(this, void 0, void 0, function* () { this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core13.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); + core14.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core13.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); + core14.info(`Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); yield (0, utils_1.sleep)(backoffTime); } - core13.info(`Finished backoff for retry #${retryCount}, continuing with upload`); + core14.info(`Finished backoff for retry #${retryCount}, continuing with upload`); return; }); while (retryCount <= retryLimit) { @@ -120923,7 +120802,7 @@ var require_upload_http_client = __commonJS({ try { response = yield uploadChunkRequest(); } catch (error2) { - core13.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); + core14.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); console.log(error2); if (incrementAndCheckRetryLimit()) { return false; @@ -120935,13 +120814,13 @@ var require_upload_http_client = __commonJS({ if ((0, utils_1.isSuccessStatusCode)(response.message.statusCode)) { return true; } else if ((0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core13.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); + core14.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); if (incrementAndCheckRetryLimit(response)) { return false; } (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { - core13.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); + core14.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); (0, utils_1.displayHttpDiagnostics)(response); return false; } @@ -120959,7 +120838,7 @@ var require_upload_http_client = __commonJS({ resourceUrl.searchParams.append("artifactName", artifactName); const parameters = { Size: size }; const data = JSON.stringify(parameters, null, 2); - core13.debug(`URL is ${resourceUrl.toString()}`); + core14.debug(`URL is ${resourceUrl.toString()}`); const client = this.uploadHttpManager.getClient(0); const headers = (0, utils_1.getUploadHeaders)("application/json", false); const customErrorMessages = /* @__PURE__ */ new Map([ @@ -120972,7 +120851,7 @@ var require_upload_http_client = __commonJS({ return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages); yield response.readBody(); - core13.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); + core14.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); }); } }; @@ -121041,7 +120920,7 @@ var require_download_http_client = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DownloadHttpClient = void 0; var fs7 = __importStar4(require("fs")); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var zlib = __importStar4(require("zlib")); var utils_1 = require_utils14(); var url_1 = require("url"); @@ -121095,11 +120974,11 @@ var require_download_http_client = __commonJS({ downloadSingleArtifact(downloadItems) { return __awaiter4(this, void 0, void 0, function* () { const DOWNLOAD_CONCURRENCY = (0, config_variables_1.getDownloadFileConcurrency)(); - core13.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); + core14.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]; let currentFile = 0; let downloadedFiles = 0; - 
core13.info(`Total number of files that will be downloaded: ${downloadItems.length}`); + core14.info(`Total number of files that will be downloaded: ${downloadItems.length}`); this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length); this.statusReporter.start(); yield Promise.all(parallelDownloads.map((index) => __awaiter4(this, void 0, void 0, function* () { @@ -121108,8 +120987,8 @@ var require_download_http_client = __commonJS({ currentFile += 1; const startTime = perf_hooks_1.performance.now(); yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath); - if (core13.isDebug()) { - core13.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); + if (core14.isDebug()) { + core14.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); } this.statusReporter.incrementProcessedCount(); } @@ -121147,19 +121026,19 @@ var require_download_http_client = __commonJS({ } else { this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core13.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); + core14.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core13.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); + core14.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); yield (0, utils_1.sleep)(backoffTime); } - core13.info(`Finished backoff for retry #${retryCount}, continuing with download`); + core14.info(`Finished backoff for retry #${retryCount}, continuing with download`); } }); const isAllBytesReceived = (expected, received) => { if (!expected || !received || process.env["ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION"]) { - core13.info("Skipping download validation."); + core14.info("Skipping download validation."); return true; } return parseInt(expected) === received; @@ -121180,7 +121059,7 @@ var require_download_http_client = __commonJS({ try { response = yield makeDownloadRequest(); } catch (error2) { - core13.info("An error occurred while attempting to download a file"); + core14.info("An error occurred while attempting to download a file"); console.log(error2); yield backOff(); continue; @@ -121200,7 +121079,7 @@ var require_download_http_client = __commonJS({ } } if (forceRetry || (0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core13.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); + core14.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); resetDestinationStream(downloadPath); (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? 
yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { @@ -121222,29 +121101,29 @@ var require_download_http_client = __commonJS({ if (isGzip) { const gunzip = zlib.createGunzip(); response.message.on("error", (error2) => { - core13.info(`An error occurred while attempting to read the response stream`); + core14.info(`An error occurred while attempting to read the response stream`); gunzip.close(); destinationStream.close(); reject(error2); }).pipe(gunzip).on("error", (error2) => { - core13.info(`An error occurred while attempting to decompress the response stream`); + core14.info(`An error occurred while attempting to decompress the response stream`); destinationStream.close(); reject(error2); }).pipe(destinationStream).on("close", () => { resolve5(); }).on("error", (error2) => { - core13.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core14.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error2); }); } else { response.message.on("error", (error2) => { - core13.info(`An error occurred while attempting to read the response stream`); + core14.info(`An error occurred while attempting to read the response stream`); destinationStream.close(); reject(error2); }).pipe(destinationStream).on("close", () => { resolve5(); }).on("error", (error2) => { - core13.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core14.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error2); }); } @@ -121383,7 +121262,7 @@ var require_artifact_client = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultArtifactClient = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var upload_specification_1 = require_upload_specification(); var upload_http_client_1 = require_upload_http_client(); var utils_1 = require_utils14(); @@ -121404,7 +121283,7 @@ var require_artifact_client = __commonJS({ */ uploadArtifact(name, files, rootDirectory, options) { return __awaiter4(this, void 0, void 0, function* () { - core13.info(`Starting artifact upload + core14.info(`Starting artifact upload For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`); (0, path_and_artifact_name_validation_1.checkArtifactName)(name); const uploadSpecification = (0, upload_specification_1.getUploadSpecification)(name, rootDirectory, files); @@ -121416,24 +121295,24 @@ For more detailed logs during the artifact upload process, enable step-debugging }; const uploadHttpClient = new upload_http_client_1.UploadHttpClient(); if (uploadSpecification.length === 0) { - core13.warning(`No files found that can be uploaded`); + core14.warning(`No files found that can be uploaded`); } else { const response = yield uploadHttpClient.createArtifactInFileContainer(name, options); if (!response.fileContainerResourceUrl) { - core13.debug(response.toString()); + core14.debug(response.toString()); throw new Error("No URL provided by the Artifact Service to upload an artifact to"); } - core13.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); - core13.info(`Container for artifact "${name}" successfully created. 
Starting upload of file(s)`); + core14.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); + core14.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`); const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options); - core13.info(`File upload process has finished. Finalizing the artifact upload`); + core14.info(`File upload process has finished. Finalizing the artifact upload`); yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name); if (uploadResult.failedItems.length > 0) { - core13.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); + core14.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); } else { - core13.info(`Artifact has been finalized. All files have been successfully uploaded!`); + core14.info(`Artifact has been finalized. All files have been successfully uploaded!`); } - core13.info(` + core14.info(` The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage @@ -121467,10 +121346,10 @@ Note: The size of downloaded zips can differ significantly from the reported siz path6 = (0, path_1.resolve)(path6); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path6, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); if (downloadSpecification.filesToDownload.length === 0) { - core13.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); + core14.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); - core13.info("Directory structure has been set up for the artifact"); + core14.info("Directory structure has been set up for the artifact"); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); } @@ -121486,7 +121365,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz const response = []; const artifacts = yield downloadHttpClient.listArtifacts(); if (artifacts.count === 0) { - core13.info("Unable to find any artifacts for the associated workflow"); + core14.info("Unable to find any artifacts for the associated workflow"); return response; } if (!path6) { @@ -121498,11 +121377,11 @@ Note: The size of downloaded zips can differ significantly from the reported siz while (downloadedArtifacts < artifacts.count) { const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; downloadedArtifacts += 1; - core13.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); + core14.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path6, true); 
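// An artifact whose download specification yields no files is logged and skipped; otherwise the directory structure and any empty files are created up front before the per-file downloads begin.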
if (downloadSpecification.filesToDownload.length === 0) { - core13.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); + core14.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); @@ -121568,7 +121447,7 @@ var require_internal_glob_options_helper2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -121580,23 +121459,23 @@ var require_internal_glob_options_helper2 = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core13.debug(`implicitDescendants '${result.implicitDescendants}'`); + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = copy.matchDirectories; - core13.debug(`matchDirectories '${result.matchDirectories}'`); + core14.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core13.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core14.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -122280,7 +122159,7 @@ var require_internal_globber2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs7 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper2()); var path6 = __importStar4(require("path")); @@ -122333,7 +122212,7 @@ var require_internal_globber2 = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core13.debug(`Search path '${searchPath}'`); + core14.debug(`Search path '${searchPath}'`); try { yield __await4(fs7.promises.lstat(searchPath)); } catch (err) { @@ -122408,7 +122287,7 @@ var require_internal_globber2 = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core13.debug(`Broken symlink '${item.path}'`); + core14.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -122424,7 +122303,7 @@ var require_internal_globber2 = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -122517,7 +122396,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = void 0; var crypto = __importStar4(require("crypto")); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs7 = __importStar4(require("fs")); var stream = __importStar4(require("stream")); var util = __importStar4(require("util")); @@ -122526,7 +122405,7 @@ var require_internal_hash_files = __commonJS({ var _a, e_1, _b, _c; var _d; return __awaiter4(this, void 0, void 0, function* () { - const writeDelegate = verbose ? core13.info : core13.debug; + const writeDelegate = verbose ? core14.info : core14.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd(); const result = crypto.createHash("sha256"); @@ -122634,22 +122513,76 @@ var require_glob4 = __commonJS({ // src/analyze-action-post.ts var fs6 = __toESM(require("fs")); -var core12 = __toESM(require_core()); +var core13 = __toESM(require_core()); // src/actions-util.ts var fs = __toESM(require("fs")); -var core3 = __toESM(require_core()); +var core4 = __toESM(require_core()); var toolrunner = __toESM(require_toolrunner()); var github = __toESM(require_github()); var io2 = __toESM(require_io()); // src/util.ts var path = __toESM(require("path")); -var core2 = __toESM(require_core()); +var core3 = __toESM(require_core()); var exec = __toESM(require_exec()); var io = __toESM(require_io()); var import_del = __toESM(require_del()); -var import_get_folder_size = __toESM(require_get_folder_size()); + +// node_modules/get-folder-size/index.js +var import_node_path = require("node:path"); +async function getFolderSize(itemPath, options) { + return await core(itemPath, options, { errors: true }); +} +getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); +getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); +async function core(rootItemPath, options = {}, returnType = {}) { + const fs7 = options.fs || await import("node:fs/promises"); + let folderSize = 0n; + const foundInos = /* @__PURE__ */ new Set(); + const errors = []; + await processItem(rootItemPath); + async function processItem(itemPath) { + if (options.ignore?.test(itemPath)) return; + const stats = returnType.strict ? await fs7.lstat(itemPath, { bigint: true }) : await fs7.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); + if (typeof stats !== "object") return; + if (!foundInos.has(stats.ino)) { + foundInos.add(stats.ino); + folderSize += stats.size; + } + if (stats.isDirectory()) { + const directoryItems = returnType.strict ? 
await fs7.readdir(itemPath) : await fs7.readdir(itemPath).catch((error2) => errors.push(error2)); + if (typeof directoryItems !== "object") return; + await Promise.all( + directoryItems.map( + (directoryItem) => processItem((0, import_node_path.join)(itemPath, directoryItem)) + ) + ); + } + } + if (!options.bigint) { + if (folderSize > BigInt(Number.MAX_SAFE_INTEGER)) { + const error2 = new RangeError( + "The folder size is too large to return as a Number. You can instruct this package to return a BigInt instead." + ); + if (returnType.strict) { + throw error2; + } + errors.push(error2); + folderSize = Number.MAX_SAFE_INTEGER; + } else { + folderSize = Number(folderSize); + } + } + if (returnType.errors) { + return { + size: folderSize, + errors: errors.length > 0 ? errors : null + }; + } else { + return folderSize; + } +} // node_modules/js-yaml/dist/js-yaml.mjs function isNothing(subject) { @@ -123220,7 +123153,7 @@ var json = failsafe.extend({ float ] }); -var core = json; +var core2 = json; var YAML_DATE_REGEXP = new RegExp( "^([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])$" ); @@ -123434,7 +123367,7 @@ var set = new type("tag:yaml.org,2002:set", { resolve: resolveYamlSet, construct: constructYamlSet }); -var _default = core.extend({ +var _default = core2.extend({ implicit: [ timestamp, merge @@ -125312,7 +125245,7 @@ function checkGitHubVersionInRange(version, logger) { ); } hasBeenWarnedAboutVersion = true; - core2.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true); + core3.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true); } function apiVersionInRange(version, minimumVersion2, maximumVersion2) { if (!semver.satisfies(version, `>=${minimumVersion2}`)) { @@ -125365,14 +125298,14 @@ async function asyncSome(array, predicate) { // src/actions-util.ts var pkg = require_package(); var getRequiredInput = function(name) { - const value = core3.getInput(name); + const value = core4.getInput(name); if (!value) { throw new ConfigurationError(`Input required and not supplied: ${name}`); } return value; }; var getOptionalInput = function(name) { - const value = core3.getInput(name); + const value = core4.getInput(name); return value.length > 0 ? 
value : void 0; }; function getTemporaryDirectory() { @@ -125453,7 +125386,7 @@ async function runTool(cmd, args = [], opts = {}) { } var persistedInputsKey = "persisted_inputs"; var restoreInputs = function() { - const persistedInputs = core3.getState(persistedInputsKey); + const persistedInputs = core4.getState(persistedInputsKey); if (persistedInputs) { for (const [name, value] of JSON.parse(persistedInputs)) { process.env[name] = value; @@ -125462,7 +125395,7 @@ var restoreInputs = function() { }; // src/api-client.ts -var core4 = __toESM(require_core()); +var core5 = __toESM(require_core()); var githubUtils = __toESM(require_utils4()); var retry = __toESM(require_dist_node15()); var import_console_log_level = __toESM(require_console_log_level()); @@ -125516,7 +125449,7 @@ async function getGitHubVersion() { // src/codeql.ts var fs4 = __toESM(require("fs")); var path4 = __toESM(require("path")); -var core9 = __toESM(require_core()); +var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); // src/cli-errors.ts @@ -125770,7 +125703,7 @@ var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { var supportedAnalysisKinds = new Set(Object.values(AnalysisKind)); // src/caching-utils.ts -var core5 = __toESM(require_core()); +var core6 = __toESM(require_core()); // src/feature-flags.ts var semver3 = __toESM(require_semver2()); @@ -125781,13 +125714,13 @@ var path2 = __toESM(require("path")); var actionsCache = __toESM(require_cache3()); // src/git-utils.ts -var core6 = __toESM(require_core()); +var core7 = __toESM(require_core()); var toolrunner2 = __toESM(require_toolrunner()); var io3 = __toESM(require_io()); var runGitCommand = async function(workingDirectory, args, customErrorMessage) { let stdout = ""; let stderr = ""; - core6.debug(`Running git command: git ${args.join(" ")}`); + core7.debug(`Running git command: git ${args.join(" ")}`); try { await new toolrunner2.ToolRunner(await io3.which("git", true), args, { silent: true, @@ -125807,7 +125740,7 @@ var runGitCommand = async function(workingDirectory, args, customErrorMessage) { if (stderr.includes("not a git repository")) { reason = "The checkout path provided to the action does not appear to be a git repository."; } - core6.info(`git call failed. ${customErrorMessage} Error: ${reason}`); + core7.info(`git call failed. 
${customErrorMessage} Error: ${reason}`); throw error2; } }; @@ -125918,7 +125851,7 @@ async function getRef() { ) !== head; if (hasChangedRef) { const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head"); - core6.debug( + core7.debug( `No longer on merge commit, rewriting ref from ${ref} to ${newRef}.` ); return newRef; @@ -125944,16 +125877,16 @@ async function isAnalyzingDefaultBranch() { } // src/logging.ts -var core7 = __toESM(require_core()); +var core8 = __toESM(require_core()); function getActionsLogger() { - return core7; + return core8; } function withGroup(groupName, f) { - core7.startGroup(groupName); + core8.startGroup(groupName); try { return f(); } finally { - core7.endGroup(); + core8.endGroup(); } } @@ -126304,7 +126237,7 @@ var toolcache = __toESM(require_tool_cache()); var semver5 = __toESM(require_semver2()); // src/tools-download.ts -var core8 = __toESM(require_core()); +var core9 = __toESM(require_core()); var import_http_client = __toESM(require_lib()); var toolcache2 = __toESM(require_tool_cache()); var import_follow_redirects = __toESM(require_follow_redirects()); @@ -126763,12 +126696,12 @@ ${output}` ); } else if (checkVersion && process.env["CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */] !== "true" && !await codeQlVersionAtLeast(codeql, CODEQL_NEXT_MINIMUM_VERSION)) { const result = await codeql.getVersion(); - core9.warning( + core10.warning( `CodeQL CLI version ${result.version} was discontinued on ${GHES_MOST_RECENT_DEPRECATION_DATE} alongside GitHub Enterprise Server ${GHES_VERSION_MOST_RECENTLY_DEPRECATED} and will not be supported by the next minor release of the CodeQL Action. Please update to CodeQL CLI version ${CODEQL_NEXT_MINIMUM_VERSION} or later. For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version. Alternatively, if you want to continue using CodeQL CLI version ${result.version}, you can replace 'github/codeql-action/*@v${getActionVersion().split(".")[0]}' by 'github/codeql-action/*@v${getActionVersion()}' in your code scanning workflow to continue using this version of the CodeQL Action.` ); - core9.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); + core10.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); } return codeql; } @@ -126878,7 +126811,7 @@ var fs5 = __toESM(require("fs")); var path5 = __toESM(require("path")); var artifact = __toESM(require_artifact2()); var artifactLegacy = __toESM(require_artifact_client2()); -var core11 = __toESM(require_core()); +var core12 = __toESM(require_core()); var import_archiver = __toESM(require_archiver()); var import_del3 = __toESM(require_del()); @@ -126887,7 +126820,7 @@ var io5 = __toESM(require_io()); var import_del2 = __toESM(require_del()); // src/autobuild.ts -var core10 = __toESM(require_core()); +var core11 = __toESM(require_core()); // src/dependency-caching.ts var import_path = require("path"); @@ -126944,7 +126877,7 @@ async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghV } const uploadSupported = isSafeArtifactUpload(codeQlVersion); if (!uploadSupported) { - core11.info( + core12.info( `Skipping debug artifact upload because the current CLI does not support safe upload. 
Please upgrade to CLI v${SafeArtifactUploadVersion} or later.` ); return "upload-not-supported"; @@ -126958,7 +126891,7 @@ async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghV ).sort()) suffix += `-${matrixVal}`; } catch { - core11.info( + core12.info( "Could not parse user-specified `matrix` input into JSON. The debug artifact will not be named with the user's `matrix` input." ); } @@ -126976,7 +126909,7 @@ async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghV ); return "upload-successful"; } catch (e) { - core11.warning(`Failed to upload debug artifacts: ${e}`); + core12.warning(`Failed to upload debug artifacts: ${e}`); return "upload-failed"; } } @@ -127027,7 +126960,7 @@ async function runWrapper() { } } } catch (error2) { - core12.setFailed( + core13.setFailed( `analyze post-action step failed: ${getErrorMessage(error2)}` ); } diff --git a/lib/analyze-action.js b/lib/analyze-action.js index 4d8aa26d3..20f118bbc 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({ var require_mock_client = __commonJS({ "node_modules/undici/lib/mock/mock-client.js"(exports2, module2) { "use strict"; - var { promisify: promisify3 } = require("util"); + var { promisify: promisify2 } = require("util"); var Client = require_client(); var { buildMockDispatch } = require_mock_utils(); var { @@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({ return new MockInterceptor(opts, this[kDispatches]); } async [kClose]() { - await promisify3(this[kOriginalClose])(); + await promisify2(this[kOriginalClose])(); this[kConnected] = 0; this[kMockAgent][Symbols.kClients].delete(this[kOrigin]); } @@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({ var require_mock_pool = __commonJS({ "node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) { "use strict"; - var { promisify: promisify3 } = require("util"); + var { promisify: promisify2 } = require("util"); var Pool = require_pool(); var { buildMockDispatch } = require_mock_utils(); var { @@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({ return new MockInterceptor(opts, this[kDispatches]); } async [kClose]() { - await promisify3(this[kOriginalClose])(); + await promisify2(this[kOriginalClose])(); this[kConnected] = 0; this[kMockAgent][Symbols.kClients].delete(this[kOrigin]); } @@ -29931,14 +29931,14 @@ var require_out4 = __commonJS({ var require_path_type = __commonJS({ "node_modules/path-type/index.js"(exports2) { "use strict"; - var { promisify: promisify3 } = require("util"); + var { promisify: promisify2 } = require("util"); var fs17 = require("fs"); async function isType(fsStatType, statsMethodName, filePath) { if (typeof filePath !== "string") { throw new TypeError(`Expected a string, got ${typeof filePath}`); } try { - const stats = await promisify3(fs17[fsStatType])(filePath); + const stats = await promisify2(fs17[fsStatType])(filePath); return stats[statsMethodName](); } catch (error2) { if (error2.code === "ENOENT") { @@ -30431,7 +30431,7 @@ var require_slash = __commonJS({ var require_gitignore = __commonJS({ "node_modules/globby/gitignore.js"(exports2, module2) { "use strict"; - var { promisify: promisify3 } = require("util"); + var { promisify: promisify2 } = require("util"); var fs17 = require("fs"); var path16 = require("path"); var fastGlob = require_out4(); @@ -30443,7 +30443,7 @@ var require_gitignore = __commonJS({ "**/coverage/**", "**/.git" ]; - var readFileP = 
promisify3(fs17.readFile); + var readFileP = promisify2(fs17.readFile); var mapGitIgnorePatternTo = (base) => (ignore) => { if (ignore.startsWith("!")) { return "!" + path16.posix.join(base, ignore.slice(1)); @@ -34227,7 +34227,7 @@ var require_p_map = __commonJS({ var require_del = __commonJS({ "node_modules/del/index.js"(exports2, module2) { "use strict"; - var { promisify: promisify3 } = require("util"); + var { promisify: promisify2 } = require("util"); var path16 = require("path"); var globby = require_globby(); var isGlob = require_is_glob(); @@ -34237,7 +34237,7 @@ var require_del = __commonJS({ var isPathInside = require_is_path_inside(); var rimraf = require_rimraf(); var pMap = require_p_map(); - var rimrafP = promisify3(rimraf); + var rimrafP = promisify2(rimraf); var rimrafOptions = { glob: false, unlink: gracefulFs.unlink, @@ -34336,127 +34336,6 @@ var require_del = __commonJS({ } }); -// node_modules/tiny-each-async/index.js -var require_tiny_each_async = __commonJS({ - "node_modules/tiny-each-async/index.js"(exports2, module2) { - "use strict"; - module2.exports = function eachAsync(arr, parallelLimit, iteratorFn, cb) { - var pending = 0; - var index = 0; - var lastIndex = arr.length - 1; - var called = false; - var limit; - var callback; - var iterate; - if (typeof parallelLimit === "number") { - limit = parallelLimit; - iterate = iteratorFn; - callback = cb || function noop() { - }; - } else { - iterate = parallelLimit; - callback = iteratorFn || function noop() { - }; - limit = arr.length; - } - if (!arr.length) { - return callback(); - } - var iteratorLength = iterate.length; - var shouldCallNextIterator = function shouldCallNextIterator2() { - return !called && pending < limit && index < lastIndex; - }; - var iteratorCallback = function iteratorCallback2(err) { - if (called) { - return; - } - pending--; - if (err || index === lastIndex && !pending) { - called = true; - callback(err); - } else if (shouldCallNextIterator()) { - processIterator(++index); - } - }; - var processIterator = function processIterator2() { - pending++; - var args = iteratorLength === 2 ? [arr[index], iteratorCallback] : [arr[index], index, iteratorCallback]; - iterate.apply(null, args); - if (shouldCallNextIterator()) { - processIterator2(++index); - } - }; - processIterator(); - }; - } -}); - -// node_modules/get-folder-size/index.js -var require_get_folder_size = __commonJS({ - "node_modules/get-folder-size/index.js"(exports2, module2) { - "use strict"; - var fs17 = require("fs"); - var path16 = require("path"); - var eachAsync = require_tiny_each_async(); - function readSizeRecursive(seen, item, ignoreRegEx, callback) { - let cb; - let ignoreRegExp; - if (!callback) { - cb = ignoreRegEx; - ignoreRegExp = null; - } else { - cb = callback; - ignoreRegExp = ignoreRegEx; - } - fs17.lstat(item, function lstat(e, stats) { - let total = !e ? 
stats.size || 0 : 0; - if (stats) { - if (seen.has(stats.ino)) { - return cb(null, 0); - } - seen.add(stats.ino); - } - if (!e && stats.isDirectory()) { - fs17.readdir(item, (err, list) => { - if (err) { - return cb(err); - } - eachAsync( - list, - 5e3, - (dirItem, next) => { - readSizeRecursive( - seen, - path16.join(item, dirItem), - ignoreRegExp, - (error2, size) => { - if (!error2) { - total += size; - } - next(error2); - } - ); - }, - (finalErr) => { - cb(finalErr, total); - } - ); - }); - } else { - if (ignoreRegExp && ignoreRegExp.test(item)) { - total = 0; - } - cb(e, total); - } - }); - } - module2.exports = (...args) => { - args.unshift(/* @__PURE__ */ new Set()); - return readSizeRecursive(...args); - }; - } -}); - // node_modules/semver/internal/constants.js var require_constants9 = __commonJS({ "node_modules/semver/internal/constants.js"(exports2, module2) { @@ -36412,7 +36291,7 @@ var require_package = __commonJS({ "fast-deep-equal": "^3.1.3", "file-url": "^3.0.0", "follow-redirects": "^1.15.11", - "get-folder-size": "^2.0.1", + "get-folder-size": "^5.0.0", "js-yaml": "^4.1.0", jsonschema: "1.4.1", long: "^5.3.2", @@ -38076,7 +37955,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -38086,15 +37965,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core14.debug(`implicitDescendants '${result.implicitDescendants}'`); + core15.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -38758,7 +38637,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var fs17 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path16 = __importStar4(require("path")); @@ -38809,7 +38688,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core14.debug(`Search path '${searchPath}'`); + core15.debug(`Search path '${searchPath}'`); try { yield __await4(fs17.promises.lstat(searchPath)); } catch (err) { @@ -38881,7 +38760,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core14.debug(`Broken symlink '${item.path}'`); + core15.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -38897,7 +38776,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -40221,7 +40100,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob2 = __importStar4(require_glob2()); var io7 = __importStar4(require_io()); @@ -40274,7 +40153,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path16.relative(workspace, file).replace(new RegExp(`\\${path16.sep}`, "g"), "/"); - core14.debug(`Matched: ${relativeFile}`); + core15.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -40304,7 +40183,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core15.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -40315,10 +40194,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core14.debug(err.message); + core15.debug(err.message); } versionOutput = versionOutput.trim(); - core14.debug(versionOutput); + core15.debug(versionOutput); return versionOutput; }); } @@ -40326,7 +40205,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core14.debug(`zstd version: ${version}`); + core15.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -75638,7 +75517,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -75680,7 +75559,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core15.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -75735,14 +75614,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core14.debug(`BlobClient: 
${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core15.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core15.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -75813,7 +75692,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -75874,9 +75753,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core15.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core14.debug(`${name} - Error is not retryable`); + core15.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -75981,7 +75860,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -76019,7 +75898,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core15.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -76053,7 +75932,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core15.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -76103,7 +75982,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core15.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -76114,7 +75993,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core14.debug("Unable to validate download, no Content-Length header"); + core15.debug("Unable to validate download, no Content-Length header"); } }); } @@ -76234,7 +76113,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core14.debug("Unable to determine content length, downloading file with http-client..."); + core15.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -76314,7 +76193,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -76334,9 +76213,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
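// CACHE_UPLOAD_CHUNK_SIZE is read as a number of MiB: the value is converted to
// bytes (value * 1024 * 1024) and capped at 128 MiB (128 * 1024 * 1024 bytes).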
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core15.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core15.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -76373,12 +76252,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Download concurrency: ${result.downloadConcurrency}`); - core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core14.debug(`Lookup only: ${result.lookupOnly}`); + core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core15.debug(`Download concurrency: ${result.downloadConcurrency}`); + core15.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core15.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core15.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core15.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -76558,7 +76437,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs17 = __importStar4(require("fs")); @@ -76576,7 +76455,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core14.debug(`Resource Url: ${url}`); + core15.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -76604,7 +76483,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core14.isDebug()) { + if (core15.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -76617,9 +76496,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core14.setSecret(cacheDownloadUrl); - core14.debug(`Cache Result:`); - core14.debug(JSON.stringify(cacheResult)); + core15.setSecret(cacheDownloadUrl); + core15.debug(`Cache Result:`); + core15.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -76634,10 +76513,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core15.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core15.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -76682,7 +76561,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core15.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -76704,7 +76583,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core14.debug("Awaiting all uploads"); + core15.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -76747,16 +76626,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core14.debug("Upload cache"); + core15.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core14.debug("Commiting cache"); + core15.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core15.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core14.info("Cache saved successfully"); + core15.info("Cache saved successfully"); } }); } @@ -82182,7 +82061,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var path16 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -82235,7 +82114,7 @@ var require_cache3 = __commonJS({ function restoreCache4(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); + core15.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -82251,8 +82130,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - core14.debug(JSON.stringify(keys)); + core15.debug("Resolved Keys:"); + core15.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -82270,19 +82149,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); + core15.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path16.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core14.info("Cache restored successfully"); + core15.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -82290,16 +82169,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error2.message}`); + core15.error(`Failed to restore: ${error2.message}`); } else { - core14.warning(`Failed to restore: ${error2.message}`); + core15.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core14.debug(`Failed to delete archive: ${error2}`); + core15.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -82310,8 +82189,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - core14.debug(JSON.stringify(keys)); + core15.debug("Resolved Keys:"); + core15.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -82329,30 +82208,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core14.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core15.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core14.info(`Cache hit for restore-key: ${response.matchedKey}`); + core15.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core14.info(`Cache hit for: ${response.matchedKey}`); + core15.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
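// options.lookupOnly: the caller only wants to know whether a matching cache
// entry exists, so the restore path returns the matched key without downloading.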
void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); + core15.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path16.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive path: ${archivePath}`); - core14.debug(`Starting download of archive to: ${archivePath}`); + core15.debug(`Archive path: ${archivePath}`); + core15.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core14.isDebug()) { + core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core14.info("Cache restored successfully"); + core15.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -82360,9 +82239,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error2.message}`); + core15.error(`Failed to restore: ${error2.message}`); } else { - core14.warning(`Failed to restore: ${error2.message}`); + core15.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -82371,7 +82250,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core14.debug(`Failed to delete archive: ${error2}`); + core15.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -82380,7 +82259,7 @@ var require_cache3 = __commonJS({ function saveCache4(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); + core15.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -82399,26 +82278,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); + core15.debug("Cache Paths:"); + core15.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path16.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); + core15.debug(`File Size: ${archiveFileSize}`); if 
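// fileSizeLimit above is 10 * 1024 * 1024 * 1024 bytes (10 GiB); the check only
// applies outside GitHub Enterprise Server, i.e. when isGhes() is false.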
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core14.debug("Reserving Cache"); + core15.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -82431,26 +82310,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core14.debug(`Saving Cache (ID: ${cacheId})`); + core15.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core14.info(`Failed to save: ${typedError.message}`); + core15.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); + core15.error(`Failed to save: ${typedError.message}`); } else { - core14.warning(`Failed to save: ${typedError.message}`); + core15.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core14.debug(`Failed to delete archive: ${error2}`); + core15.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -82463,26 +82342,26 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); + core15.debug("Cache Paths:"); + core15.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path16.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); + core15.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } options.archiveSizeBytes = archiveFileSize; - core14.debug("Reserving Cache"); + core15.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -82496,10 +82375,10 @@ var require_cache3 = __commonJS({ } signedUploadUrl = response.signedUploadUrl; } catch (error2) { - core14.debug(`Failed to reserve 
cache: ${error2}`); + core15.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core14.debug(`Attempting to upload cache located at: ${archivePath}`); + core15.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -82507,7 +82386,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core15.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); } @@ -82517,19 +82396,19 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core14.info(`Failed to save: ${typedError.message}`); + core15.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); + core15.error(`Failed to save: ${typedError.message}`); } else { - core14.warning(`Failed to save: ${typedError.message}`); + core15.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core14.debug(`Failed to delete archive: ${error2}`); + core15.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -82737,7 +82616,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -82760,10 +82639,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core14.info(err.message); + core15.info(err.message); } const seconds = this.getSleepAmount(); - core14.info(`Waiting ${seconds} seconds before trying again`); + core15.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -82843,7 +82722,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var io7 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); var fs17 = __importStar4(require("fs")); @@ -82872,8 +82751,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || path16.join(_getTempDirectory(), crypto.randomUUID()); yield io7.mkdirP(path16.dirname(dest)); - core14.debug(`Downloading ${url}`); - core14.debug(`Destination ${dest}`); + 
core15.debug(`Downloading ${url}`); + core15.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -82900,7 +82779,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core14.debug("set auth"); + core15.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -82909,7 +82788,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError(response.message.statusCode); - core14.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core15.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -82918,16 +82797,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs17.createWriteStream(dest)); - core14.debug("download complete"); + core15.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core14.debug("download failed"); + core15.debug("download failed"); try { yield io7.rmRF(dest); } catch (err) { - core14.debug(`Failed to delete '${dest}'. ${err.message}`); + core15.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -82942,7 +82821,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core14.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core15.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -82992,7 +82871,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core14.debug("Checking tar --version"); + core15.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -83002,7 +82881,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core14.debug(versionOutput.trim()); + core15.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -83010,7 +82889,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core14.isDebug() && !flags.includes("v")) { + if (core15.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -83042,7 +82921,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core14.isDebug()) { + if (core15.isDebug()) { args.push("-v"); } const xarPath = yield io7.which("xar", true); @@ -83087,7 +82966,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core14.debug(`Using pwsh at path: ${pwshPath}`); + core15.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -83107,7 +82986,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io7.which("powershell", true); - core14.debug(`Using powershell at path: ${powershellPath}`); + core15.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -83116,7 +82995,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, 
void 0, function* () { const unzipPath = yield io7.which("unzip", true); const args = [file]; - if (!core14.isDebug()) { + if (!core15.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -83127,8 +83006,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os5.arch(); - core14.debug(`Caching tool ${tool} ${version} ${arch2}`); - core14.debug(`source dir: ${sourceDir}`); + core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + core15.debug(`source dir: ${sourceDir}`); if (!fs17.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -83146,14 +83025,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os5.arch(); - core14.debug(`Caching tool ${tool} ${version} ${arch2}`); - core14.debug(`source file: ${sourceFile}`); + core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + core15.debug(`source file: ${sourceFile}`); if (!fs17.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path16.join(destFolder, targetFile); - core14.debug(`destination file ${destPath}`); + core15.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -83177,12 +83056,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path16.join(_getCacheDirectory(), toolName, versionSpec, arch2); - core14.debug(`checking cache: ${cachePath}`); + core15.debug(`checking cache: ${cachePath}`); if (fs17.existsSync(cachePath) && fs17.existsSync(`${cachePath}.complete`)) { - core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core14.debug("not found"); + core15.debug("not found"); } } return toolPath; @@ -83213,7 +83092,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core14.debug("set auth"); + core15.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -83234,7 +83113,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core14.debug("Invalid json"); + core15.debug("Invalid json"); } } return releases; @@ -83260,7 +83139,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path16.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); - core14.debug(`destination ${folderPath}`); + core15.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); yield io7.rmRF(markerPath); @@ -83272,19 +83151,19 @@ var require_tool_cache = __commonJS({ const folderPath = path16.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs17.writeFileSync(markerPath, ""); - core14.debug("finished caching tool"); + core15.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - core14.debug(`isExplicit: ${c}`); 
+ core15.debug(`isExplicit: ${c}`); const valid3 = semver8.valid(c) != null; - core14.debug(`explicit? ${valid3}`); + core15.debug(`explicit? ${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core14.debug(`evaluating ${versions.length} versions`); + core15.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver8.gt(a, b)) { return 1; @@ -83300,9 +83179,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core14.debug(`matched: ${version}`); + core15.debug(`matched: ${version}`); } else { - core14.debug("match not found"); + core15.debug("match not found"); } return version; } @@ -83911,7 +83790,7 @@ var require_internal_glob_options_helper2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -83923,23 +83802,23 @@ var require_internal_glob_options_helper2 = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core14.debug(`implicitDescendants '${result.implicitDescendants}'`); + core15.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = copy.matchDirectories; - core14.debug(`matchDirectories '${result.matchDirectories}'`); + core15.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core14.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core15.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -84623,7 +84502,7 @@ var require_internal_globber2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var fs17 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper2()); var path16 = __importStar4(require("path")); @@ -84676,7 +84555,7 @@ var require_internal_globber2 = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core14.debug(`Search path '${searchPath}'`); + core15.debug(`Search path '${searchPath}'`); try { yield __await4(fs17.promises.lstat(searchPath)); } catch (err) { @@ -84751,7 +84630,7 @@ var require_internal_globber2 = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core14.debug(`Broken symlink '${item.path}'`); + core15.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -84767,7 +84646,7 @@ var require_internal_globber2 = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -84860,7 +84739,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = void 0; var crypto = __importStar4(require("crypto")); - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var fs17 = __importStar4(require("fs")); var stream2 = __importStar4(require("stream")); var util = __importStar4(require("util")); @@ -84869,7 +84748,7 @@ var require_internal_hash_files = __commonJS({ var _a, e_1, _b, _c; var _d; return __awaiter4(this, void 0, void 0, function* () { - const writeDelegate = verbose ? core14.info : core14.debug; + const writeDelegate = verbose ? core15.info : core15.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd(); const result = crypto.createHash("sha256"); @@ -89203,12 +89082,12 @@ module.exports = __toCommonJS(analyze_action_exports); var fs16 = __toESM(require("fs")); var import_path4 = __toESM(require("path")); var import_perf_hooks3 = require("perf_hooks"); -var core13 = __toESM(require_core()); +var core14 = __toESM(require_core()); // src/actions-util.ts var fs2 = __toESM(require("fs")); var path2 = __toESM(require("path")); -var core3 = __toESM(require_core()); +var core4 = __toESM(require_core()); var toolrunner = __toESM(require_toolrunner()); var github = __toESM(require_github()); var io2 = __toESM(require_io()); @@ -89217,8 +89096,7 @@ var io2 = __toESM(require_io()); var fs = __toESM(require("fs")); var os = __toESM(require("os")); var path = __toESM(require("path")); -var import_util = require("util"); -var core2 = __toESM(require_core()); +var core3 = __toESM(require_core()); var exec = __toESM(require_exec()); var io = __toESM(require_io()); @@ -89359,7 +89237,61 @@ function checkDiskSpace(directoryPath, dependencies = { // src/util.ts var import_del = __toESM(require_del()); -var import_get_folder_size = __toESM(require_get_folder_size()); + +// node_modules/get-folder-size/index.js +var import_node_path2 = require("node:path"); +async function getFolderSize(itemPath, options) { + return await core(itemPath, options, { errors: true }); +} +getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); +getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); +async function core(rootItemPath, options = {}, returnType = {}) { + const fs17 = options.fs || await import("node:fs/promises"); + let folderSize = 0n; + const foundInos = /* @__PURE__ */ new Set(); + const errors = []; + await processItem(rootItemPath); + async function processItem(itemPath) { + if (options.ignore?.test(itemPath)) return; + const stats = returnType.strict ? 
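+          // strict mode: let an lstat() rejection propagate to the caller;
+          // otherwise the error is pushed onto errors, and the catch handler's
+          // return value (a number, from Array.prototype.push) is filtered out
+          // by the typeof check below. bigint: true keeps sizes exact even
+          // above Number.MAX_SAFE_INTEGER.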
await fs17.lstat(itemPath, { bigint: true }) : await fs17.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); + if (typeof stats !== "object") return; + if (!foundInos.has(stats.ino)) { + foundInos.add(stats.ino); + folderSize += stats.size; + } + if (stats.isDirectory()) { + const directoryItems = returnType.strict ? await fs17.readdir(itemPath) : await fs17.readdir(itemPath).catch((error2) => errors.push(error2)); + if (typeof directoryItems !== "object") return; + await Promise.all( + directoryItems.map( + (directoryItem) => processItem((0, import_node_path2.join)(itemPath, directoryItem)) + ) + ); + } + } + if (!options.bigint) { + if (folderSize > BigInt(Number.MAX_SAFE_INTEGER)) { + const error2 = new RangeError( + "The folder size is too large to return as a Number. You can instruct this package to return a BigInt instead." + ); + if (returnType.strict) { + throw error2; + } + errors.push(error2); + folderSize = Number.MAX_SAFE_INTEGER; + } else { + folderSize = Number(folderSize); + } + } + if (returnType.errors) { + return { + size: folderSize, + errors: errors.length > 0 ? errors : null + }; + } else { + return folderSize; + } +} // node_modules/js-yaml/dist/js-yaml.mjs function isNothing(subject) { @@ -89930,7 +89862,7 @@ var json = failsafe.extend({ float ] }); -var core = json; +var core2 = json; var YAML_DATE_REGEXP = new RegExp( "^([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])$" ); @@ -90144,7 +90076,7 @@ var set = new type("tag:yaml.org,2002:set", { resolve: resolveYamlSet, construct: constructYamlSet }); -var _default = core.extend({ +var _default = core2.extend({ implicit: [ timestamp, merge @@ -92216,11 +92148,11 @@ function assertNever(value) { throw new ExhaustivityCheckingError(value); } function initializeEnvironment(version) { - core2.exportVariable("CODEQL_ACTION_FEATURE_MULTI_LANGUAGE" /* FEATURE_MULTI_LANGUAGE */, "false"); - core2.exportVariable("CODEQL_ACTION_FEATURE_SANDWICH" /* FEATURE_SANDWICH */, "false"); - core2.exportVariable("CODEQL_ACTION_FEATURE_SARIF_COMBINE" /* FEATURE_SARIF_COMBINE */, "true"); - core2.exportVariable("CODEQL_ACTION_FEATURE_WILL_UPLOAD" /* FEATURE_WILL_UPLOAD */, "true"); - core2.exportVariable("CODEQL_ACTION_VERSION" /* VERSION */, version); + core3.exportVariable("CODEQL_ACTION_FEATURE_MULTI_LANGUAGE" /* FEATURE_MULTI_LANGUAGE */, "false"); + core3.exportVariable("CODEQL_ACTION_FEATURE_SANDWICH" /* FEATURE_SANDWICH */, "false"); + core3.exportVariable("CODEQL_ACTION_FEATURE_SARIF_COMBINE" /* FEATURE_SARIF_COMBINE */, "true"); + core3.exportVariable("CODEQL_ACTION_FEATURE_WILL_UPLOAD" /* FEATURE_WILL_UPLOAD */, "true"); + core3.exportVariable("CODEQL_ACTION_VERSION" /* VERSION */, version); } function getRequiredEnvParam(paramName) { const value = process.env[paramName]; @@ -92283,7 +92215,7 @@ function getTestingEnvironment() { } async function tryGetFolderBytes(cacheDir, logger, quiet = false) { try { - return await (0, import_util.promisify)(import_get_folder_size.default)(cacheDir); + return await getFolderSize.loose(cacheDir); } catch (e) { if (!quiet || logger.isDebug()) { logger.warning( @@ -92313,7 +92245,7 @@ async function withTimeout(timeoutMs, promise, onTimeout) { } async function checkForTimeout() { if (hadTimeout === true) { - core2.info( + core3.info( "A timeout occurred, force exiting the process after 30 seconds to prevent hanging." 
); await delay(3e4, { allowProcessExit: true }); @@ -92349,7 +92281,7 @@ async function checkDiskUsage(logger) { } else { logger.debug(message); } - core2.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true"); + core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true"); } return { numAvailableBytes: diskUsage.free, @@ -92369,10 +92301,10 @@ function checkActionVersion(version, githubVersion) { semver.coerce(githubVersion.version) ?? "0.0.0", ">=3.11" )) { - core2.error( + core3.error( "CodeQL Action major versions v1 and v2 have been deprecated. Please update all occurrences of the CodeQL Action in your workflow files to v3. For more information, see https://github.blog/changelog/2025-01-10-code-scanning-codeql-action-v2-is-now-deprecated/" ); - core2.exportVariable("CODEQL_ACTION_DID_LOG_VERSION_DEPRECATION" /* LOG_VERSION_DEPRECATION */, "true"); + core3.exportVariable("CODEQL_ACTION_DID_LOG_VERSION_DEPRECATION" /* LOG_VERSION_DEPRECATION */, "true"); } } } @@ -92397,13 +92329,13 @@ async function checkSipEnablement(logger) { if (sipStatusOutput.stdout.includes( "System Integrity Protection status: enabled." )) { - core2.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true"); + core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true"); return true; } if (sipStatusOutput.stdout.includes( "System Integrity Protection status: disabled." )) { - core2.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false"); + core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false"); return false; } } @@ -92450,14 +92382,14 @@ async function asyncSome(array, predicate) { // src/actions-util.ts var pkg = require_package(); var getRequiredInput = function(name) { - const value = core3.getInput(name); + const value = core4.getInput(name); if (!value) { throw new ConfigurationError(`Input required and not supplied: ${name}`); } return value; }; var getOptionalInput = function(name) { - const value = core3.getInput(name); + const value = core4.getInput(name); return value.length > 0 ? value : void 0; }; function getTemporaryDirectory() { @@ -92501,7 +92433,7 @@ function getUploadValue(input) { case "never": return "never"; default: - core3.warning( + core4.warning( `Unrecognized 'upload' input to 'analyze' Action: ${input}. 
Defaulting to 'always'.` ); return "always"; @@ -92604,7 +92536,7 @@ var persistInputs = function() { const inputEnvironmentVariables = Object.entries(process.env).filter( ([name]) => name.startsWith("INPUT_") ); - core3.saveState(persistedInputsKey, JSON.stringify(inputEnvironmentVariables)); + core4.saveState(persistedInputsKey, JSON.stringify(inputEnvironmentVariables)); }; function getPullRequestBranches() { const pullRequest = github.context.payload.pull_request; @@ -92662,7 +92594,7 @@ var io5 = __toESM(require_io()); var import_del2 = __toESM(require_del()); // src/api-client.ts -var core4 = __toESM(require_core()); +var core5 = __toESM(require_core()); var githubUtils = __toESM(require_utils4()); var retry = __toESM(require_dist_node15()); var import_console_log_level = __toESM(require_console_log_level()); @@ -92767,7 +92699,7 @@ async function getAnalysisKey() { const workflowPath = await getWorkflowRelativePath(); const jobName = getRequiredEnvParam("GITHUB_JOB"); analysisKey = `${workflowPath}:${jobName}`; - core4.exportVariable(analysisKeyEnvVar, analysisKey); + core5.exportVariable(analysisKeyEnvVar, analysisKey); return analysisKey; } function computeAutomationID(analysis_key, environment) { @@ -92818,12 +92750,12 @@ function wrapApiConfigurationError(e) { } // src/autobuild.ts -var core10 = __toESM(require_core()); +var core11 = __toESM(require_core()); // src/codeql.ts var fs11 = __toESM(require("fs")); var path10 = __toESM(require("path")); -var core9 = __toESM(require_core()); +var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); // src/cli-errors.ts @@ -93077,7 +93009,7 @@ var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { var supportedAnalysisKinds = new Set(Object.values(AnalysisKind)); // src/caching-utils.ts -var core5 = __toESM(require_core()); +var core6 = __toESM(require_core()); async function getTotalCacheSize(paths, logger, quiet = false) { const sizes = await Promise.all( paths.map((cacheDir) => tryGetFolderBytes(cacheDir, logger, quiet)) @@ -93107,13 +93039,13 @@ var path3 = __toESM(require("path")); var actionsCache = __toESM(require_cache3()); // src/git-utils.ts -var core6 = __toESM(require_core()); +var core7 = __toESM(require_core()); var toolrunner2 = __toESM(require_toolrunner()); var io3 = __toESM(require_io()); var runGitCommand = async function(workingDirectory, args, customErrorMessage) { let stdout = ""; let stderr = ""; - core6.debug(`Running git command: git ${args.join(" ")}`); + core7.debug(`Running git command: git ${args.join(" ")}`); try { await new toolrunner2.ToolRunner(await io3.which("git", true), args, { silent: true, @@ -93133,7 +93065,7 @@ var runGitCommand = async function(workingDirectory, args, customErrorMessage) { if (stderr.includes("not a git repository")) { reason = "The checkout path provided to the action does not appear to be a git repository."; } - core6.info(`git call failed. ${customErrorMessage} Error: ${reason}`); + core7.info(`git call failed. 
${customErrorMessage} Error: ${reason}`); throw error2; } }; @@ -93278,7 +93210,7 @@ async function getRef() { ) !== head; if (hasChangedRef) { const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head"); - core6.debug( + core7.debug( `No longer on merge commit, rewriting ref from ${ref} to ${newRef}.` ); return newRef; @@ -93304,16 +93236,16 @@ async function isAnalyzingDefaultBranch() { } // src/logging.ts -var core7 = __toESM(require_core()); +var core8 = __toESM(require_core()); function getActionsLogger() { - return core7; + return core8; } async function withGroupAsync(groupName, f) { - core7.startGroup(groupName); + core8.startGroup(groupName); try { return await f(); } finally { - core7.endGroup(); + core8.endGroup(); } } function formatDuration(durationMs) { @@ -94405,7 +94337,7 @@ var fs8 = __toESM(require("fs")); var os2 = __toESM(require("os")); var path7 = __toESM(require("path")); var import_perf_hooks = require("perf_hooks"); -var core8 = __toESM(require_core()); +var core9 = __toESM(require_core()); var import_http_client = __toESM(require_lib()); var toolcache2 = __toESM(require_tool_cache()); var import_follow_redirects = __toESM(require_follow_redirects()); @@ -94459,10 +94391,10 @@ async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorizat }; } } catch (e) { - core8.warning( + core9.warning( `Failed to download and extract CodeQL bundle using streaming with error: ${getErrorMessage(e)}` ); - core8.warning(`Falling back to downloading the bundle before extracting.`); + core9.warning(`Falling back to downloading the bundle before extracting.`); await cleanUpGlob(dest, "CodeQL bundle", logger); } const toolsDownloadStart = import_perf_hooks.performance.now(); @@ -95543,12 +95475,12 @@ ${output}` ); } else if (checkVersion && process.env["CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */] !== "true" && !await codeQlVersionAtLeast(codeql, CODEQL_NEXT_MINIMUM_VERSION)) { const result = await codeql.getVersion(); - core9.warning( + core10.warning( `CodeQL CLI version ${result.version} was discontinued on ${GHES_MOST_RECENT_DEPRECATION_DATE} alongside GitHub Enterprise Server ${GHES_VERSION_MOST_RECENTLY_DEPRECATED} and will not be supported by the next minor release of the CodeQL Action. Please update to CodeQL CLI version ${CODEQL_NEXT_MINIMUM_VERSION} or later. For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version. Alternatively, if you want to continue using CodeQL CLI version ${result.version}, you can replace 'github/codeql-action/*@v${getActionVersion().split(".")[0]}' by 'github/codeql-action/*@v${getActionVersion()}' in your code scanning workflow to continue using this version of the CodeQL Action.` ); - core9.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); + core10.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); } return codeql; } @@ -95670,16 +95602,16 @@ async function setupCppAutobuild(codeql, logger) { logger.info( `Disabling ${featureName} as we are on a self-hosted runner.${getWorkflowEventName() !== "dynamic" ? ` To override this, set the ${envVar} environment variable to 'true' in your workflow. 
See ${"https://docs.github.com/en/actions/learn-github-actions/variables#defining-environment-variables-for-a-single-workflow" /* DEFINE_ENV_VARIABLES */} for more information.` : ""}` ); - core10.exportVariable(envVar, "false"); + core11.exportVariable(envVar, "false"); } else { logger.info( `Enabling ${featureName}. This can be disabled by setting the ${envVar} environment variable to 'false'. See ${"https://docs.github.com/en/actions/learn-github-actions/variables#defining-environment-variables-for-a-single-workflow" /* DEFINE_ENV_VARIABLES */} for more information.` ); - core10.exportVariable(envVar, "true"); + core11.exportVariable(envVar, "true"); } } else { logger.info(`Disabling ${featureName}.`); - core10.exportVariable(envVar, "false"); + core11.exportVariable(envVar, "false"); } } async function runAutobuild(config, language, logger) { @@ -95694,7 +95626,7 @@ async function runAutobuild(config, language, logger) { await codeQL.runAutobuild(config, language); } if (language === "go" /* go */) { - core10.exportVariable("CODEQL_ACTION_DID_AUTOBUILD_GOLANG" /* DID_AUTOBUILD_GOLANG */, "true"); + core11.exportVariable("CODEQL_ACTION_DID_AUTOBUILD_GOLANG" /* DID_AUTOBUILD_GOLANG */, "true"); } logger.endGroup(); } @@ -96388,7 +96320,7 @@ async function uploadDatabases(repositoryNwo, codeql, config, apiDetails, logger // src/status-report.ts var os4 = __toESM(require("os")); -var core11 = __toESM(require_core()); +var core12 = __toESM(require_core()); function isFirstPartyAnalysis(actionName) { if (actionName !== "upload-sarif" /* UploadSarif */) { return true; @@ -96404,12 +96336,12 @@ function getActionsStatus(error2, otherFailureCause) { } function setJobStatusIfUnsuccessful(actionStatus) { if (actionStatus === "user-error") { - core11.exportVariable( + core12.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); } else if (actionStatus === "failure" || actionStatus === "aborted") { - core11.exportVariable( + core12.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_FAILURE" /* FailureStatus */ ); @@ -96428,14 +96360,14 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; if (workflowStartedAt === void 0) { workflowStartedAt = actionStartedAt.toISOString(); - core11.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + core12.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); } const runnerOs = getRequiredEnvParam("RUNNER_OS"); const codeQlCliVersion = getCachedCodeQlVersion(); const actionRef = process.env["GITHUB_ACTION_REF"] || ""; const testingEnvironment = getTestingEnvironment(); if (testingEnvironment) { - core11.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + core12.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); } const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; const statusReport = { @@ -96513,9 +96445,9 @@ var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scan async function sendStatusReport(statusReport) { setJobStatusIfUnsuccessful(statusReport.status); const statusReportJSON = JSON.stringify(statusReport); - core11.debug(`Sending status report: ${statusReportJSON}`); + core12.debug(`Sending status report: ${statusReportJSON}`); if (isInTestMode()) { - core11.debug("In test mode. Status reports are not uploaded."); + core12.debug("In test mode. Status reports are not uploaded."); return; } const nwo = getRepositoryNwo(); @@ -96534,26 +96466,26 @@ async function sendStatusReport(statusReport) { switch (e.status) { case 403: if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core11.warning( + core12.warning( `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading Code Scanning results requires write access. To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. 
See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` ); } else { - core11.warning(e.message); + core12.warning(e.message); } return; case 404: - core11.warning(e.message); + core12.warning(e.message); return; case 422: if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core11.debug(INCOMPATIBLE_MSG); + core12.debug(INCOMPATIBLE_MSG); } else { - core11.debug(OUT_OF_DATE_MSG); + core12.debug(OUT_OF_DATE_MSG); } return; } } - core11.warning( + core12.warning( `An unexpected error occurred when sending code scanning status report: ${getErrorMessage( e )}` @@ -96565,7 +96497,7 @@ async function sendStatusReport(statusReport) { var fs15 = __toESM(require("fs")); var path14 = __toESM(require("path")); var import_zlib = __toESM(require("zlib")); -var core12 = __toESM(require_core()); +var core13 = __toESM(require_core()); var import_file_url = __toESM(require_file_url()); var jsonschema = __toESM(require_lib2()); @@ -97822,7 +97754,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. ${deprecationMoreInformationMessage}` ); - core12.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core13.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -97872,7 +97804,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple CodeQL runs with the same category is deprecated ${deprecationWarningMessage} for CodeQL CLI 2.16.6 and earlier. Please update your CodeQL CLI version or update your workflow to set a distinct category for each CodeQL run. ${deprecationMoreInformationMessage}` ); - core12.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core13.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -97937,13 +97869,13 @@ async function uploadPayload(payload, repositoryNwo, logger, target) { if (isHTTPError(e)) { switch (e.status) { case 403: - core12.warning(e.message || GENERIC_403_MSG); + core13.warning(e.message || GENERIC_403_MSG); break; case 404: - core12.warning(e.message || GENERIC_404_MSG); + core13.warning(e.message || GENERIC_404_MSG); break; default: - core12.warning(e.message); + core13.warning(e.message); break; } } @@ -98289,7 +98221,7 @@ function validateUniqueCategory(sarif, sentinelPrefix = CodeScanningTarget.senti `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? 
tool : "none"})` ); } - core12.exportVariable(sentinelEnvVar, sentinelEnvVar); + core13.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { @@ -98468,7 +98400,7 @@ async function run() { } const apiDetails = getApiDetails(); const outputDir = getRequiredInput("output"); - core13.exportVariable("CODEQL_ACTION_SARIF_RESULTS_OUTPUT_DIR" /* SARIF_RESULTS_OUTPUT_DIR */, outputDir); + core14.exportVariable("CODEQL_ACTION_SARIF_RESULTS_OUTPUT_DIR" /* SARIF_RESULTS_OUTPUT_DIR */, outputDir); const threads = getThreadsFlag( getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger @@ -98520,8 +98452,8 @@ async function run() { for (const language of config.languages) { dbLocations[language] = getCodeQLDatabasePath(config, language); } - core13.setOutput("db-locations", dbLocations); - core13.setOutput("sarif-output", import_path4.default.resolve(outputDir)); + core14.setOutput("db-locations", dbLocations); + core14.setOutput("sarif-output", import_path4.default.resolve(outputDir)); const uploadInput = getOptionalInput("upload"); if (runStats && getUploadValue(uploadInput) === "always") { uploadResult = await uploadFiles( @@ -98532,7 +98464,7 @@ async function run() { logger, CodeScanningTarget ); - core13.setOutput("sarif-id", uploadResult.sarifID); + core14.setOutput("sarif-id", uploadResult.sarifID); if (isCodeQualityEnabled(config)) { const qualityUploadResult = await uploadFiles( outputDir, @@ -98545,7 +98477,7 @@ async function run() { logger, CodeQualityTarget ); - core13.setOutput("quality-sarif-id", qualityUploadResult.sarifID); + core14.setOutput("quality-sarif-id", qualityUploadResult.sarifID); } } else { logger.info("Not uploading results"); @@ -98573,15 +98505,15 @@ async function run() { ); } if (getOptionalInput("expect-error") === "true") { - core13.setFailed( + core14.setFailed( `expect-error input was set to true but no error was thrown.` ); } - core13.exportVariable("CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */, "true"); + core14.exportVariable("CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */, "true"); } catch (unwrappedError) { const error2 = wrapError(unwrappedError); if (getOptionalInput("expect-error") !== "true" || hasBadExpectErrorInput()) { - core13.setFailed(error2.message); + core14.setFailed(error2.message); } await sendStatusReport2( startedAt, @@ -98642,7 +98574,7 @@ async function runWrapper() { try { await runPromise; } catch (error2) { - core13.setFailed(`analyze action failed: ${getErrorMessage(error2)}`); + core14.setFailed(`analyze action failed: ${getErrorMessage(error2)}`); } await checkForTimeout(); } diff --git a/lib/autobuild-action.js b/lib/autobuild-action.js index 964f6ebca..ea7cbdbc3 100644 --- a/lib/autobuild-action.js +++ b/lib/autobuild-action.js @@ -34336,127 +34336,6 @@ var require_del = __commonJS({ } }); -// node_modules/tiny-each-async/index.js -var require_tiny_each_async = __commonJS({ - "node_modules/tiny-each-async/index.js"(exports2, module2) { - "use strict"; - module2.exports = function eachAsync(arr, parallelLimit, iteratorFn, cb) { - var pending = 0; - var index = 0; - var lastIndex = arr.length - 1; - var called = false; - var limit; - var callback; - var iterate; - if (typeof parallelLimit === "number") { - limit = parallelLimit; - iterate = iteratorFn; - callback = cb || function noop() { - }; - } else { - iterate = parallelLimit; - callback = iteratorFn || function noop() { - }; - limit = arr.length; - } - if 
(!arr.length) { - return callback(); - } - var iteratorLength = iterate.length; - var shouldCallNextIterator = function shouldCallNextIterator2() { - return !called && pending < limit && index < lastIndex; - }; - var iteratorCallback = function iteratorCallback2(err) { - if (called) { - return; - } - pending--; - if (err || index === lastIndex && !pending) { - called = true; - callback(err); - } else if (shouldCallNextIterator()) { - processIterator(++index); - } - }; - var processIterator = function processIterator2() { - pending++; - var args = iteratorLength === 2 ? [arr[index], iteratorCallback] : [arr[index], index, iteratorCallback]; - iterate.apply(null, args); - if (shouldCallNextIterator()) { - processIterator2(++index); - } - }; - processIterator(); - }; - } -}); - -// node_modules/get-folder-size/index.js -var require_get_folder_size = __commonJS({ - "node_modules/get-folder-size/index.js"(exports2, module2) { - "use strict"; - var fs7 = require("fs"); - var path7 = require("path"); - var eachAsync = require_tiny_each_async(); - function readSizeRecursive(seen, item, ignoreRegEx, callback) { - let cb; - let ignoreRegExp; - if (!callback) { - cb = ignoreRegEx; - ignoreRegExp = null; - } else { - cb = callback; - ignoreRegExp = ignoreRegEx; - } - fs7.lstat(item, function lstat(e, stats) { - let total = !e ? stats.size || 0 : 0; - if (stats) { - if (seen.has(stats.ino)) { - return cb(null, 0); - } - seen.add(stats.ino); - } - if (!e && stats.isDirectory()) { - fs7.readdir(item, (err, list) => { - if (err) { - return cb(err); - } - eachAsync( - list, - 5e3, - (dirItem, next) => { - readSizeRecursive( - seen, - path7.join(item, dirItem), - ignoreRegExp, - (error2, size) => { - if (!error2) { - total += size; - } - next(error2); - } - ); - }, - (finalErr) => { - cb(finalErr, total); - } - ); - }); - } else { - if (ignoreRegExp && ignoreRegExp.test(item)) { - total = 0; - } - cb(e, total); - } - }); - } - module2.exports = (...args) => { - args.unshift(/* @__PURE__ */ new Set()); - return readSizeRecursive(...args); - }; - } -}); - // node_modules/semver/internal/constants.js var require_constants9 = __commonJS({ "node_modules/semver/internal/constants.js"(exports2, module2) { @@ -36412,7 +36291,7 @@ var require_package = __commonJS({ "fast-deep-equal": "^3.1.3", "file-url": "^3.0.0", "follow-redirects": "^1.15.11", - "get-folder-size": "^2.0.1", + "get-folder-size": "^5.0.0", "js-yaml": "^4.1.0", jsonschema: "1.4.1", long: "^5.3.2", @@ -38076,7 +37955,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -38086,15 +37965,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core13.debug(`implicitDescendants '${result.implicitDescendants}'`); + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - 
core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -38758,7 +38637,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs7 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path7 = __importStar4(require("path")); @@ -38809,7 +38688,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core13.debug(`Search path '${searchPath}'`); + core14.debug(`Search path '${searchPath}'`); try { yield __await4(fs7.promises.lstat(searchPath)); } catch (err) { @@ -38881,7 +38760,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core13.debug(`Broken symlink '${item.path}'`); + core14.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); @@ -38897,7 +38776,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -40221,7 +40100,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob = __importStar4(require_glob2()); var io5 = __importStar4(require_io()); @@ -40274,7 +40153,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path7.relative(workspace, file).replace(new RegExp(`\\${path7.sep}`, "g"), "/"); - core13.debug(`Matched: ${relativeFile}`); + core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -40304,7 +40183,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core13.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -40315,10 +40194,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core13.debug(err.message); + core14.debug(err.message); } versionOutput = versionOutput.trim(); - core13.debug(versionOutput); + core14.debug(versionOutput); return versionOutput; }); } @@ -40326,7 +40205,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core13.debug(`zstd version: 
${version}`); + core14.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -75638,7 +75517,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -75680,7 +75559,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -75735,14 +75614,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core13.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core13.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -75813,7 +75692,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -75874,9 +75753,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core13.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core13.debug(`${name} - Error is not retryable`); + core14.debug(`${name} - Error is not retryable`); break; } yield sleep(delay); @@ -75981,7 +75860,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -76019,7 +75898,7 @@ var require_downloadUtils 
= __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core13.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. @@ -76053,7 +75932,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -76103,7 +75982,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core13.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -76114,7 +75993,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core13.debug("Unable to validate download, no Content-Length header"); + core14.debug("Unable to validate download, no Content-Length header"); } }); } @@ -76234,7 +76113,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core13.debug("Unable to determine content length, downloading file with http-client..."); + core14.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -76314,7 +76193,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -76334,9 +76213,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core13.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -76373,12 +76252,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Download concurrency: ${result.downloadConcurrency}`); - core13.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core13.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core13.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core13.debug(`Lookup only: ${result.lookupOnly}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Download concurrency: ${result.downloadConcurrency}`); + core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core14.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -76558,7 +76437,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs7 = __importStar4(require("fs")); @@ -76576,7 +76455,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core13.debug(`Resource Url: ${url}`); + core14.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -76604,7 +76483,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core13.isDebug()) { + if (core14.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -76617,9 +76496,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core13.setSecret(cacheDownloadUrl); - core13.debug(`Cache Result:`); - core13.debug(JSON.stringify(cacheResult)); + core14.setSecret(cacheDownloadUrl); + core14.debug(`Cache Result:`); + core14.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -76634,10 +76513,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core13.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core13.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -76682,7 +76561,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core13.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -76704,7 +76583,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core13.debug("Awaiting all uploads"); + core14.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -76747,16 +76626,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core13.debug("Upload cache"); + core14.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core13.debug("Commiting cache"); + core14.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core13.info("Cache saved successfully"); + core14.info("Cache saved successfully"); } }); } @@ -82182,7 +82061,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var path7 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -82235,7 +82114,7 @@ var require_cache3 = __commonJS({ function restoreCache3(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -82251,8 +82130,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -82270,19 +82149,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path7.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -82290,16 +82169,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error2.message}`); + core14.error(`Failed to restore: ${error2.message}`); } else { - core13.warning(`Failed to restore: ${error2.message}`); + core14.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -82310,8 +82189,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -82329,30 +82208,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core13.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core14.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core13.info(`Cache hit for restore-key: ${response.matchedKey}`); + core14.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core13.info(`Cache hit for: ${response.matchedKey}`); + core14.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path7.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive path: ${archivePath}`); - core13.debug(`Starting download of archive to: ${archivePath}`); + core14.debug(`Archive path: ${archivePath}`); + core14.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core13.isDebug()) { + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -82360,9 +82239,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error2.message}`); + core14.error(`Failed to restore: ${error2.message}`); } else { - core13.warning(`Failed to restore: ${error2.message}`); + core14.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -82371,7 +82250,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -82380,7 +82259,7 @@ var require_cache3 = __commonJS({ function saveCache3(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -82399,26 +82278,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path7.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -82431,26 +82310,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core13.debug(`Saving Cache (ID: ${cacheId})`); + core14.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core13.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -82463,26 +82342,26 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path7.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } options.archiveSizeBytes = archiveFileSize; - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -82496,10 +82375,10 @@ var require_cache3 = __commonJS({ } signedUploadUrl = response.signedUploadUrl; } catch (error2) { - core13.debug(`Failed to reserve cache: 
${error2}`); + core14.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core13.debug(`Attempting to upload cache located at: ${archivePath}`); + core14.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -82507,7 +82386,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core13.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); } @@ -82517,19 +82396,19 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core13.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -82737,7 +82616,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -82760,10 +82639,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core13.info(err.message); + core14.info(err.message); } const seconds = this.getSleepAmount(); - core13.info(`Waiting ${seconds} seconds before trying again`); + core14.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -82843,7 +82722,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var io5 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); var fs7 = __importStar4(require("fs")); @@ -82872,8 +82751,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || path7.join(_getTempDirectory(), crypto.randomUUID()); yield io5.mkdirP(path7.dirname(dest)); - core13.debug(`Downloading ${url}`); - core13.debug(`Destination ${dest}`); + core14.debug(`Downloading 
${url}`); + core14.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -82900,7 +82779,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core13.debug("set auth"); + core14.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -82909,7 +82788,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError(response.message.statusCode); - core13.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core14.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream.pipeline); @@ -82918,16 +82797,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs7.createWriteStream(dest)); - core13.debug("download complete"); + core14.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core13.debug("download failed"); + core14.debug("download failed"); try { yield io5.rmRF(dest); } catch (err) { - core13.debug(`Failed to delete '${dest}'. ${err.message}`); + core14.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -82942,7 +82821,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core13.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core14.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -82992,7 +82871,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core13.debug("Checking tar --version"); + core14.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -83002,7 +82881,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core13.debug(versionOutput.trim()); + core14.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -83010,7 +82889,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core13.isDebug() && !flags.includes("v")) { + if (core14.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -83042,7 +82921,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core13.isDebug()) { + if (core14.isDebug()) { args.push("-v"); } const xarPath = yield io5.which("xar", true); @@ -83087,7 +82966,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core13.debug(`Using pwsh at path: ${pwshPath}`); + core14.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -83107,7 +82986,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io5.which("powershell", true); - core13.debug(`Using powershell at path: ${powershellPath}`); + core14.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -83116,7 +82995,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const 
unzipPath = yield io5.which("unzip", true); const args = [file]; - if (!core13.isDebug()) { + if (!core14.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -83127,8 +83006,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch = arch || os2.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch}`); - core13.debug(`source dir: ${sourceDir}`); + core14.debug(`Caching tool ${tool} ${version} ${arch}`); + core14.debug(`source dir: ${sourceDir}`); if (!fs7.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -83146,14 +83025,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch = arch || os2.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch}`); - core13.debug(`source file: ${sourceFile}`); + core14.debug(`Caching tool ${tool} ${version} ${arch}`); + core14.debug(`source file: ${sourceFile}`); if (!fs7.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch); const destPath = path7.join(destFolder, targetFile); - core13.debug(`destination file ${destPath}`); + core14.debug(`destination file ${destPath}`); yield io5.cp(sourceFile, destPath); _completeToolPath(tool, version, arch); return destFolder; @@ -83177,12 +83056,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path7.join(_getCacheDirectory(), toolName, versionSpec, arch); - core13.debug(`checking cache: ${cachePath}`); + core14.debug(`checking cache: ${cachePath}`); if (fs7.existsSync(cachePath) && fs7.existsSync(`${cachePath}.complete`)) { - core13.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); + core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); toolPath = cachePath; } else { - core13.debug("not found"); + core14.debug("not found"); } } return toolPath; @@ -83213,7 +83092,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core13.debug("set auth"); + core14.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -83234,7 +83113,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core13.debug("Invalid json"); + core14.debug("Invalid json"); } } return releases; @@ -83260,7 +83139,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path7.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); - core13.debug(`destination ${folderPath}`); + core14.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io5.rmRF(folderPath); yield io5.rmRF(markerPath); @@ -83272,19 +83151,19 @@ var require_tool_cache = __commonJS({ const folderPath = path7.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); const markerPath = `${folderPath}.complete`; fs7.writeFileSync(markerPath, ""); - core13.debug("finished caching tool"); + core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - core13.debug(`isExplicit: ${c}`); + core14.debug(`isExplicit: ${c}`); const valid3 = 
semver8.valid(c) != null; - core13.debug(`explicit? ${valid3}`); + core14.debug(`explicit? ${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core13.debug(`evaluating ${versions.length} versions`); + core14.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver8.gt(a, b)) { return 1; @@ -83300,9 +83179,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core13.debug(`matched: ${version}`); + core14.debug(`matched: ${version}`); } else { - core13.debug("match not found"); + core14.debug("match not found"); } return version; } @@ -83879,18 +83758,18 @@ var require_follow_redirects = __commonJS({ }); // src/autobuild-action.ts -var core12 = __toESM(require_core()); +var core13 = __toESM(require_core()); // src/actions-util.ts var fs = __toESM(require("fs")); -var core3 = __toESM(require_core()); +var core4 = __toESM(require_core()); var toolrunner = __toESM(require_toolrunner()); var github = __toESM(require_github()); var io2 = __toESM(require_io()); // src/util.ts var path = __toESM(require("path")); -var core2 = __toESM(require_core()); +var core3 = __toESM(require_core()); var exec = __toESM(require_exec()); var io = __toESM(require_io()); @@ -84031,7 +83910,61 @@ function checkDiskSpace(directoryPath, dependencies = { // src/util.ts var import_del = __toESM(require_del()); -var import_get_folder_size = __toESM(require_get_folder_size()); + +// node_modules/get-folder-size/index.js +var import_node_path2 = require("node:path"); +async function getFolderSize(itemPath, options) { + return await core(itemPath, options, { errors: true }); +} +getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); +getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); +async function core(rootItemPath, options = {}, returnType = {}) { + const fs7 = options.fs || await import("node:fs/promises"); + let folderSize = 0n; + const foundInos = /* @__PURE__ */ new Set(); + const errors = []; + await processItem(rootItemPath); + async function processItem(itemPath) { + if (options.ignore?.test(itemPath)) return; + const stats = returnType.strict ? await fs7.lstat(itemPath, { bigint: true }) : await fs7.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); + if (typeof stats !== "object") return; + if (!foundInos.has(stats.ino)) { + foundInos.add(stats.ino); + folderSize += stats.size; + } + if (stats.isDirectory()) { + const directoryItems = returnType.strict ? await fs7.readdir(itemPath) : await fs7.readdir(itemPath).catch((error2) => errors.push(error2)); + if (typeof directoryItems !== "object") return; + await Promise.all( + directoryItems.map( + (directoryItem) => processItem((0, import_node_path2.join)(itemPath, directoryItem)) + ) + ); + } + } + if (!options.bigint) { + if (folderSize > BigInt(Number.MAX_SAFE_INTEGER)) { + const error2 = new RangeError( + "The folder size is too large to return as a Number. You can instruct this package to return a BigInt instead." + ); + if (returnType.strict) { + throw error2; + } + errors.push(error2); + folderSize = Number.MAX_SAFE_INTEGER; + } else { + folderSize = Number(folderSize); + } + } + if (returnType.errors) { + return { + size: folderSize, + errors: errors.length > 0 ? 
errors : null + }; + } else { + return folderSize; + } +} // node_modules/js-yaml/dist/js-yaml.mjs function isNothing(subject) { @@ -84602,7 +84535,7 @@ var json = failsafe.extend({ float ] }); -var core = json; +var core2 = json; var YAML_DATE_REGEXP = new RegExp( "^([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])$" ); @@ -84816,7 +84749,7 @@ var set = new type("tag:yaml.org,2002:set", { resolve: resolveYamlSet, construct: constructYamlSet }); -var _default = core.extend({ +var _default = core2.extend({ implicit: [ timestamp, merge @@ -86694,7 +86627,7 @@ function checkGitHubVersionInRange(version, logger) { ); } hasBeenWarnedAboutVersion = true; - core2.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true); + core3.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true); } function apiVersionInRange(version, minimumVersion2, maximumVersion2) { if (!semver.satisfies(version, `>=${minimumVersion2}`)) { @@ -86706,11 +86639,11 @@ function apiVersionInRange(version, minimumVersion2, maximumVersion2) { return void 0; } function initializeEnvironment(version) { - core2.exportVariable("CODEQL_ACTION_FEATURE_MULTI_LANGUAGE" /* FEATURE_MULTI_LANGUAGE */, "false"); - core2.exportVariable("CODEQL_ACTION_FEATURE_SANDWICH" /* FEATURE_SANDWICH */, "false"); - core2.exportVariable("CODEQL_ACTION_FEATURE_SARIF_COMBINE" /* FEATURE_SARIF_COMBINE */, "true"); - core2.exportVariable("CODEQL_ACTION_FEATURE_WILL_UPLOAD" /* FEATURE_WILL_UPLOAD */, "true"); - core2.exportVariable("CODEQL_ACTION_VERSION" /* VERSION */, version); + core3.exportVariable("CODEQL_ACTION_FEATURE_MULTI_LANGUAGE" /* FEATURE_MULTI_LANGUAGE */, "false"); + core3.exportVariable("CODEQL_ACTION_FEATURE_SANDWICH" /* FEATURE_SANDWICH */, "false"); + core3.exportVariable("CODEQL_ACTION_FEATURE_SARIF_COMBINE" /* FEATURE_SARIF_COMBINE */, "true"); + core3.exportVariable("CODEQL_ACTION_FEATURE_WILL_UPLOAD" /* FEATURE_WILL_UPLOAD */, "true"); + core3.exportVariable("CODEQL_ACTION_VERSION" /* VERSION */, version); } function getRequiredEnvParam(paramName) { const value = process.env[paramName]; @@ -86773,7 +86706,7 @@ async function checkDiskUsage(logger) { } else { logger.debug(message); } - core2.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true"); + core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true"); } return { numAvailableBytes: diskUsage.free, @@ -86793,10 +86726,10 @@ function checkActionVersion(version, githubVersion) { semver.coerce(githubVersion.version) ?? "0.0.0", ">=3.11" )) { - core2.error( + core3.error( "CodeQL Action major versions v1 and v2 have been deprecated. Please update all occurrences of the CodeQL Action in your workflow files to v3. For more information, see https://github.blog/changelog/2025-01-10-code-scanning-codeql-action-v2-is-now-deprecated/" ); - core2.exportVariable("CODEQL_ACTION_DID_LOG_VERSION_DEPRECATION" /* LOG_VERSION_DEPRECATION */, "true"); + core3.exportVariable("CODEQL_ACTION_DID_LOG_VERSION_DEPRECATION" /* LOG_VERSION_DEPRECATION */, "true"); } } } @@ -86813,13 +86746,13 @@ async function checkSipEnablement(logger) { if (sipStatusOutput.stdout.includes( "System Integrity Protection status: enabled." )) { - core2.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true"); + core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true"); return true; } if (sipStatusOutput.stdout.includes( "System Integrity Protection status: disabled." 
)) {
-    core2.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
+    core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
     return false;
   }
 }
@@ -86843,14 +86776,14 @@ async function asyncSome(array, predicate) {
 
 // src/actions-util.ts
 var pkg = require_package();
 var getRequiredInput = function(name) {
-  const value = core3.getInput(name);
+  const value = core4.getInput(name);
   if (!value) {
     throw new ConfigurationError(`Input required and not supplied: ${name}`);
   }
   return value;
 };
 var getOptionalInput = function(name) {
-  const value = core3.getInput(name);
+  const value = core4.getInput(name);
   return value.length > 0 ? value : void 0;
 };
 function getTemporaryDirectory() {
@@ -86964,7 +86897,7 @@ async function runTool(cmd, args = [], opts = {}) {
 }
 
 // src/api-client.ts
-var core4 = __toESM(require_core());
+var core5 = __toESM(require_core());
 var githubUtils = __toESM(require_utils4());
 var retry = __toESM(require_dist_node15());
 var import_console_log_level = __toESM(require_console_log_level());
@@ -87069,17 +87002,17 @@ async function getAnalysisKey() {
   const workflowPath = await getWorkflowRelativePath();
   const jobName = getRequiredEnvParam("GITHUB_JOB");
   analysisKey = `${workflowPath}:${jobName}`;
-  core4.exportVariable(analysisKeyEnvVar, analysisKey);
+  core5.exportVariable(analysisKeyEnvVar, analysisKey);
   return analysisKey;
 }
 
 // src/autobuild.ts
-var core10 = __toESM(require_core());
+var core11 = __toESM(require_core());
 
 // src/codeql.ts
 var fs6 = __toESM(require("fs"));
 var path6 = __toESM(require("path"));
-var core9 = __toESM(require_core());
+var core10 = __toESM(require_core());
 var toolrunner3 = __toESM(require_toolrunner());
 
 // src/cli-errors.ts
@@ -87333,7 +87266,7 @@ var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => {
 var supportedAnalysisKinds = new Set(Object.values(AnalysisKind));
 
 // src/caching-utils.ts
-var core5 = __toESM(require_core());
+var core6 = __toESM(require_core());
 
 // src/feature-flags.ts
 var fs3 = __toESM(require("fs"));
@@ -87350,13 +87283,13 @@ var path2 = __toESM(require("path"));
 var actionsCache = __toESM(require_cache3());
 
 // src/git-utils.ts
-var core6 = __toESM(require_core());
+var core7 = __toESM(require_core());
 var toolrunner2 = __toESM(require_toolrunner());
 var io3 = __toESM(require_io());
 var runGitCommand = async function(workingDirectory, args, customErrorMessage) {
   let stdout = "";
   let stderr = "";
-  core6.debug(`Running git command: git ${args.join(" ")}`);
+  core7.debug(`Running git command: git ${args.join(" ")}`);
   try {
     await new toolrunner2.ToolRunner(await io3.which("git", true), args, {
       silent: true,
@@ -87376,7 +87309,7 @@ var runGitCommand = async function(workingDirectory, args, customErrorMessage) {
     if (stderr.includes("not a git repository")) {
       reason = "The checkout path provided to the action does not appear to be a git repository.";
     }
-    core6.info(`git call failed. ${customErrorMessage} Error: ${reason}`);
+    core7.info(`git call failed. ${customErrorMessage} Error: ${reason}`);
     throw error2;
   }
 };
@@ -87487,7 +87420,7 @@ async function getRef() {
   ) !== head;
   if (hasChangedRef) {
     const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
-    core6.debug(
+    core7.debug(
       `No longer on merge commit, rewriting ref from ${ref} to ${newRef}.`
     );
     return newRef;
@@ -87513,9 +87446,9 @@ async function isAnalyzingDefaultBranch() {
 }
 
 // src/logging.ts
-var core7 = __toESM(require_core());
+var core8 = __toESM(require_core());
 function getActionsLogger() {
-  return core7;
+  return core8;
 }
 
 // src/overlay-database-utils.ts
@@ -88127,7 +88060,7 @@ var toolcache = __toESM(require_tool_cache());
 var semver5 = __toESM(require_semver2());
 
 // src/tools-download.ts
-var core8 = __toESM(require_core());
+var core9 = __toESM(require_core());
 var import_http_client = __toESM(require_lib());
 var toolcache2 = __toESM(require_tool_cache());
 var import_follow_redirects = __toESM(require_follow_redirects());
@@ -88619,12 +88552,12 @@ ${output}`
     );
   } else if (checkVersion && process.env["CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */] !== "true" && !await codeQlVersionAtLeast(codeql, CODEQL_NEXT_MINIMUM_VERSION)) {
     const result = await codeql.getVersion();
-    core9.warning(
+    core10.warning(
      `CodeQL CLI version ${result.version} was discontinued on ${GHES_MOST_RECENT_DEPRECATION_DATE} alongside GitHub Enterprise Server ${GHES_VERSION_MOST_RECENTLY_DEPRECATED} and will not be supported by the next minor release of the CodeQL Action. Please update to CodeQL CLI version ${CODEQL_NEXT_MINIMUM_VERSION} or later. For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version. Alternatively, if you want to continue using CodeQL CLI version ${result.version}, you can replace 'github/codeql-action/*@v${getActionVersion().split(".")[0]}' by 'github/codeql-action/*@v${getActionVersion()}' in your code scanning workflow to continue using this version of the CodeQL Action.`
     );
-    core9.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true");
+    core10.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true");
   }
   return codeql;
 }
@@ -88785,16 +88718,16 @@ async function setupCppAutobuild(codeql, logger) {
       logger.info(
        `Disabling ${featureName} as we are on a self-hosted runner.${getWorkflowEventName() !== "dynamic" ? ` To override this, set the ${envVar} environment variable to 'true' in your workflow. See ${"https://docs.github.com/en/actions/learn-github-actions/variables#defining-environment-variables-for-a-single-workflow" /* DEFINE_ENV_VARIABLES */} for more information.` : ""}`
       );
-      core10.exportVariable(envVar, "false");
+      core11.exportVariable(envVar, "false");
     } else {
       logger.info(
        `Enabling ${featureName}. This can be disabled by setting the ${envVar} environment variable to 'false'. See ${"https://docs.github.com/en/actions/learn-github-actions/variables#defining-environment-variables-for-a-single-workflow" /* DEFINE_ENV_VARIABLES */} for more information.`
       );
-      core10.exportVariable(envVar, "true");
+      core11.exportVariable(envVar, "true");
     }
   } else {
     logger.info(`Disabling ${featureName}.`);
-    core10.exportVariable(envVar, "false");
+    core11.exportVariable(envVar, "false");
   }
 }
 async function runAutobuild(config, language, logger) {
@@ -88809,14 +88742,14 @@ async function runAutobuild(config, language, logger) {
     await codeQL.runAutobuild(config, language);
   }
   if (language === "go" /* go */) {
-    core10.exportVariable("CODEQL_ACTION_DID_AUTOBUILD_GOLANG" /* DID_AUTOBUILD_GOLANG */, "true");
+    core11.exportVariable("CODEQL_ACTION_DID_AUTOBUILD_GOLANG" /* DID_AUTOBUILD_GOLANG */, "true");
   }
   logger.endGroup();
 }
 
 // src/status-report.ts
 var os = __toESM(require("os"));
-var core11 = __toESM(require_core());
+var core12 = __toESM(require_core());
 function isFirstPartyAnalysis(actionName) {
   if (actionName !== "upload-sarif" /* UploadSarif */) {
     return true;
@@ -88832,12 +88765,12 @@ function getActionsStatus(error2, otherFailureCause) {
 }
 function setJobStatusIfUnsuccessful(actionStatus) {
   if (actionStatus === "user-error") {
-    core11.exportVariable(
+    core12.exportVariable(
       "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */,
       process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */
     );
   } else if (actionStatus === "failure" || actionStatus === "aborted") {
-    core11.exportVariable(
+    core12.exportVariable(
       "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */,
       process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_FAILURE" /* FailureStatus */
     );
@@ -88856,14 +88789,14 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi
   let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */];
   if (workflowStartedAt === void 0) {
     workflowStartedAt = actionStartedAt.toISOString();
-    core11.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt);
+    core12.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt);
   }
   const runnerOs = getRequiredEnvParam("RUNNER_OS");
   const codeQlCliVersion = getCachedCodeQlVersion();
   const actionRef = process.env["GITHUB_ACTION_REF"] || "";
   const testingEnvironment = getTestingEnvironment();
   if (testingEnvironment) {
-    core11.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment);
+    core12.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment);
   }
   const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true";
   const statusReport = {
@@ -88941,9 +88874,9 @@ var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scan
 async function sendStatusReport(statusReport) {
   setJobStatusIfUnsuccessful(statusReport.status);
   const statusReportJSON = JSON.stringify(statusReport);
-  core11.debug(`Sending status report: ${statusReportJSON}`);
+  core12.debug(`Sending status report: ${statusReportJSON}`);
   if (isInTestMode()) {
-    core11.debug("In test mode. Status reports are not uploaded.");
+    core12.debug("In test mode. Status reports are not uploaded.");
     return;
   }
   const nwo = getRepositoryNwo();
@@ -88962,26 +88895,26 @@ async function sendStatusReport(statusReport) {
     switch (e.status) {
       case 403:
         if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") {
-          core11.warning(
+          core12.warning(
            `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading Code Scanning results requires write access. To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.`
           );
         } else {
-          core11.warning(e.message);
+          core12.warning(e.message);
         }
         return;
      case 404:
-        core11.warning(e.message);
+        core12.warning(e.message);
        return;
      case 422:
        if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) {
-          core11.debug(INCOMPATIBLE_MSG);
+          core12.debug(INCOMPATIBLE_MSG);
        } else {
-          core11.debug(OUT_OF_DATE_MSG);
+          core12.debug(OUT_OF_DATE_MSG);
        }
        return;
    }
  }
-  core11.warning(
+  core12.warning(
    `An unexpected error occurred when sending code scanning status report: ${getErrorMessage(
      e
    )}`
  );
@@ -89057,7 +88990,7 @@ async function run() {
    await endTracingForCluster(codeql, config, logger);
  } catch (unwrappedError) {
    const error2 = wrapError(unwrappedError);
-    core12.setFailed(
+    core13.setFailed(
      `We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. ${error2.message}`
    );
    await sendCompletedStatusReport(
@@ -89070,14 +89003,14 @@
    );
    return;
  }
-  core12.exportVariable("CODEQL_ACTION_AUTOBUILD_DID_COMPLETE_SUCCESSFULLY" /* AUTOBUILD_DID_COMPLETE_SUCCESSFULLY */, "true");
+  core13.exportVariable("CODEQL_ACTION_AUTOBUILD_DID_COMPLETE_SUCCESSFULLY" /* AUTOBUILD_DID_COMPLETE_SUCCESSFULLY */, "true");
  await sendCompletedStatusReport(config, logger, startedAt, languages ?? []);
 }
 async function runWrapper() {
  try {
    await run();
  } catch (error2) {
-    core12.setFailed(`autobuild action failed. ${getErrorMessage(error2)}`);
+    core13.setFailed(`autobuild action failed. 
${getErrorMessage(error2)}`); } } void runWrapper(); diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 5ef71128c..d013d7b12 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -34336,127 +34336,6 @@ var require_del = __commonJS({ } }); -// node_modules/tiny-each-async/index.js -var require_tiny_each_async = __commonJS({ - "node_modules/tiny-each-async/index.js"(exports2, module2) { - "use strict"; - module2.exports = function eachAsync(arr, parallelLimit, iteratorFn, cb) { - var pending = 0; - var index = 0; - var lastIndex = arr.length - 1; - var called = false; - var limit; - var callback; - var iterate; - if (typeof parallelLimit === "number") { - limit = parallelLimit; - iterate = iteratorFn; - callback = cb || function noop() { - }; - } else { - iterate = parallelLimit; - callback = iteratorFn || function noop() { - }; - limit = arr.length; - } - if (!arr.length) { - return callback(); - } - var iteratorLength = iterate.length; - var shouldCallNextIterator = function shouldCallNextIterator2() { - return !called && pending < limit && index < lastIndex; - }; - var iteratorCallback = function iteratorCallback2(err) { - if (called) { - return; - } - pending--; - if (err || index === lastIndex && !pending) { - called = true; - callback(err); - } else if (shouldCallNextIterator()) { - processIterator(++index); - } - }; - var processIterator = function processIterator2() { - pending++; - var args = iteratorLength === 2 ? [arr[index], iteratorCallback] : [arr[index], index, iteratorCallback]; - iterate.apply(null, args); - if (shouldCallNextIterator()) { - processIterator2(++index); - } - }; - processIterator(); - }; - } -}); - -// node_modules/get-folder-size/index.js -var require_get_folder_size = __commonJS({ - "node_modules/get-folder-size/index.js"(exports2, module2) { - "use strict"; - var fs17 = require("fs"); - var path15 = require("path"); - var eachAsync = require_tiny_each_async(); - function readSizeRecursive(seen, item, ignoreRegEx, callback) { - let cb; - let ignoreRegExp; - if (!callback) { - cb = ignoreRegEx; - ignoreRegExp = null; - } else { - cb = callback; - ignoreRegExp = ignoreRegEx; - } - fs17.lstat(item, function lstat(e, stats) { - let total = !e ? 
stats.size || 0 : 0; - if (stats) { - if (seen.has(stats.ino)) { - return cb(null, 0); - } - seen.add(stats.ino); - } - if (!e && stats.isDirectory()) { - fs17.readdir(item, (err, list) => { - if (err) { - return cb(err); - } - eachAsync( - list, - 5e3, - (dirItem, next) => { - readSizeRecursive( - seen, - path15.join(item, dirItem), - ignoreRegExp, - (error2, size) => { - if (!error2) { - total += size; - } - next(error2); - } - ); - }, - (finalErr) => { - cb(finalErr, total); - } - ); - }); - } else { - if (ignoreRegExp && ignoreRegExp.test(item)) { - total = 0; - } - cb(e, total); - } - }); - } - module2.exports = (...args) => { - args.unshift(/* @__PURE__ */ new Set()); - return readSizeRecursive(...args); - }; - } -}); - // node_modules/semver/internal/constants.js var require_constants9 = __commonJS({ "node_modules/semver/internal/constants.js"(exports2, module2) { @@ -36412,7 +36291,7 @@ var require_package = __commonJS({ "fast-deep-equal": "^3.1.3", "file-url": "^3.0.0", "follow-redirects": "^1.15.11", - "get-folder-size": "^2.0.1", + "get-folder-size": "^5.0.0", "js-yaml": "^4.1.0", jsonschema: "1.4.1", long: "^5.3.2", @@ -38076,7 +37955,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -38086,15 +37965,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core17.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core18.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core17.debug(`implicitDescendants '${result.implicitDescendants}'`); + core18.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core17.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core18.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -38758,7 +38637,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var fs17 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path15 = __importStar4(require("path")); @@ -38809,7 +38688,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core17.debug(`Search path '${searchPath}'`); + core18.debug(`Search path '${searchPath}'`); try { yield __await4(fs17.promises.lstat(searchPath)); } catch (err) { @@ -38881,7 +38760,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core17.debug(`Broken symlink '${item.path}'`); + core18.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -38897,7 +38776,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core17.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core18.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -40221,7 +40100,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob2 = __importStar4(require_glob2()); var io7 = __importStar4(require_io()); @@ -40274,7 +40153,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path15.relative(workspace, file).replace(new RegExp(`\\${path15.sep}`, "g"), "/"); - core17.debug(`Matched: ${relativeFile}`); + core18.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -40304,7 +40183,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core17.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core18.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -40315,10 +40194,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core17.debug(err.message); + core18.debug(err.message); } versionOutput = versionOutput.trim(); - core17.debug(versionOutput); + core18.debug(versionOutput); return versionOutput; }); } @@ -40326,7 +40205,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core17.debug(`zstd version: ${version}`); + core18.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -75638,7 +75517,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -75680,7 +75559,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core17.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core18.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -75735,14 +75614,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core17.debug(`BlobClient: 
${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core18.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core17.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core18.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -75813,7 +75692,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -75874,9 +75753,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core17.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core18.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core17.debug(`${name} - Error is not retryable`); + core18.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -75981,7 +75860,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -76019,7 +75898,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core17.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core18.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -76053,7 +75932,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core17.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core18.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -76103,7 +75982,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core17.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core18.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -76114,7 +75993,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core17.debug("Unable to validate download, no Content-Length header"); + core18.debug("Unable to validate download, no Content-Length header"); } }); } @@ -76234,7 +76113,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core17.debug("Unable to determine content length, downloading file with http-client..."); + core18.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -76314,7 +76193,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -76334,9 +76213,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core17.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core17.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core17.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core18.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core18.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core18.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -76373,12 +76252,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core17.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core17.debug(`Download concurrency: ${result.downloadConcurrency}`); - core17.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core17.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core17.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core17.debug(`Lookup only: ${result.lookupOnly}`); + core18.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core18.debug(`Download concurrency: ${result.downloadConcurrency}`); + core18.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core18.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core18.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core18.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -76558,7 +76437,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs17 = __importStar4(require("fs")); @@ -76576,7 +76455,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core17.debug(`Resource Url: ${url}`); + core18.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -76604,7 +76483,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core17.isDebug()) { + if (core18.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -76617,9 +76496,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core17.setSecret(cacheDownloadUrl); - core17.debug(`Cache Result:`); - core17.debug(JSON.stringify(cacheResult)); + core18.setSecret(cacheDownloadUrl); + core18.debug(`Cache Result:`); + core18.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -76634,10 +76513,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core17.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core18.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core17.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core18.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -76682,7 +76561,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core17.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core18.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -76704,7 +76583,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core17.debug("Awaiting all uploads"); + core18.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -76747,16 +76626,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core17.debug("Upload cache"); + core18.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core17.debug("Commiting cache"); + core18.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core18.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core17.info("Cache saved successfully"); + core18.info("Cache saved successfully"); } }); } @@ -82182,7 +82061,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var path15 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -82235,7 +82114,7 @@ var require_cache3 = __commonJS({ function restoreCache4(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core17.debug(`Cache service version: ${cacheServiceVersion}`); + core18.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -82251,8 +82130,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core17.debug("Resolved Keys:"); - core17.debug(JSON.stringify(keys)); + core18.debug("Resolved Keys:"); + core18.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -82270,19 +82149,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core17.info("Lookup only - skipping download"); + core18.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path15.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core17.debug(`Archive Path: ${archivePath}`); + core18.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core17.isDebug()) { + if (core18.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core18.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core17.info("Cache restored successfully"); + core18.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -82290,16 +82169,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core17.error(`Failed to restore: ${error2.message}`); + core18.error(`Failed to restore: ${error2.message}`); } else { - core17.warning(`Failed to restore: ${error2.message}`); + core18.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core17.debug(`Failed to delete archive: ${error2}`); + core18.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -82310,8 +82189,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core17.debug("Resolved Keys:"); - core17.debug(JSON.stringify(keys)); + core18.debug("Resolved Keys:"); + core18.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -82329,30 +82208,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core17.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core18.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core17.info(`Cache hit for restore-key: ${response.matchedKey}`); + core18.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core17.info(`Cache hit for: ${response.matchedKey}`); + core18.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core17.info("Lookup only - skipping download"); + core18.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path15.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core17.debug(`Archive path: ${archivePath}`); - core17.debug(`Starting download of archive to: ${archivePath}`); + core18.debug(`Archive path: ${archivePath}`); + core18.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core17.isDebug()) { + core18.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core18.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core17.info("Cache restored successfully"); + core18.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -82360,9 +82239,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core17.error(`Failed to restore: ${error2.message}`); + core18.error(`Failed to restore: ${error2.message}`); } else { - core17.warning(`Failed to restore: ${error2.message}`); + core18.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -82371,7 +82250,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core17.debug(`Failed to delete archive: ${error2}`); + core18.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -82380,7 +82259,7 @@ var require_cache3 = __commonJS({ function saveCache4(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core17.debug(`Cache service version: ${cacheServiceVersion}`); + core18.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -82399,26 +82278,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core17.debug("Cache Paths:"); - core17.debug(`${JSON.stringify(cachePaths)}`); + core18.debug("Cache Paths:"); + core18.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path15.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core17.debug(`Archive Path: ${archivePath}`); + core18.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core17.isDebug()) { + if (core18.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.debug(`File Size: ${archiveFileSize}`); + core18.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core17.debug("Reserving Cache"); + core18.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -82431,26 +82310,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core17.debug(`Saving Cache (ID: ${cacheId})`); + core18.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core17.info(`Failed to save: ${typedError.message}`); + core18.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core17.error(`Failed to save: ${typedError.message}`); + core18.error(`Failed to save: ${typedError.message}`); } else { - core17.warning(`Failed to save: ${typedError.message}`); + core18.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core17.debug(`Failed to delete archive: ${error2}`); + core18.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -82463,26 +82342,26 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core17.debug("Cache Paths:"); - core17.debug(`${JSON.stringify(cachePaths)}`); + core18.debug("Cache Paths:"); + core18.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path15.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core17.debug(`Archive Path: ${archivePath}`); + core18.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core17.isDebug()) { + if (core18.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.debug(`File Size: ${archiveFileSize}`); + core18.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } options.archiveSizeBytes = archiveFileSize; - core17.debug("Reserving Cache"); + core18.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -82496,10 +82375,10 @@ var require_cache3 = __commonJS({ } signedUploadUrl = response.signedUploadUrl; } catch (error2) { - core17.debug(`Failed to reserve 
cache: ${error2}`); + core18.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core17.debug(`Attempting to upload cache located at: ${archivePath}`); + core18.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -82507,7 +82386,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core17.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core18.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); } @@ -82517,19 +82396,19 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core17.info(`Failed to save: ${typedError.message}`); + core18.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core17.error(`Failed to save: ${typedError.message}`); + core18.error(`Failed to save: ${typedError.message}`); } else { - core17.warning(`Failed to save: ${typedError.message}`); + core18.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core17.debug(`Failed to delete archive: ${error2}`); + core18.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -82737,7 +82616,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -82760,10 +82639,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core17.info(err.message); + core18.info(err.message); } const seconds = this.getSleepAmount(); - core17.info(`Waiting ${seconds} seconds before trying again`); + core18.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -82843,7 +82722,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var io7 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); var fs17 = __importStar4(require("fs")); @@ -82872,8 +82751,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || path15.join(_getTempDirectory(), crypto.randomUUID()); yield io7.mkdirP(path15.dirname(dest)); - core17.debug(`Downloading ${url}`); - core17.debug(`Destination ${dest}`); + 
core18.debug(`Downloading ${url}`); + core18.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -82900,7 +82779,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core17.debug("set auth"); + core18.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -82909,7 +82788,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError(response.message.statusCode); - core17.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core18.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -82918,16 +82797,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs17.createWriteStream(dest)); - core17.debug("download complete"); + core18.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core17.debug("download failed"); + core18.debug("download failed"); try { yield io7.rmRF(dest); } catch (err) { - core17.debug(`Failed to delete '${dest}'. ${err.message}`); + core18.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -82942,7 +82821,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core17.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core18.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -82992,7 +82871,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core17.debug("Checking tar --version"); + core18.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -83002,7 +82881,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core17.debug(versionOutput.trim()); + core18.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -83010,7 +82889,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core17.isDebug() && !flags.includes("v")) { + if (core18.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -83042,7 +82921,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core17.isDebug()) { + if (core18.isDebug()) { args.push("-v"); } const xarPath = yield io7.which("xar", true); @@ -83087,7 +82966,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core17.debug(`Using pwsh at path: ${pwshPath}`); + core18.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -83107,7 +82986,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io7.which("powershell", true); - core17.debug(`Using powershell at path: ${powershellPath}`); + core18.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -83116,7 +82995,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, 
void 0, function* () { const unzipPath = yield io7.which("unzip", true); const args = [file]; - if (!core17.isDebug()) { + if (!core18.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -83127,8 +83006,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os3.arch(); - core17.debug(`Caching tool ${tool} ${version} ${arch2}`); - core17.debug(`source dir: ${sourceDir}`); + core18.debug(`Caching tool ${tool} ${version} ${arch2}`); + core18.debug(`source dir: ${sourceDir}`); if (!fs17.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -83146,14 +83025,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os3.arch(); - core17.debug(`Caching tool ${tool} ${version} ${arch2}`); - core17.debug(`source file: ${sourceFile}`); + core18.debug(`Caching tool ${tool} ${version} ${arch2}`); + core18.debug(`source file: ${sourceFile}`); if (!fs17.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path15.join(destFolder, targetFile); - core17.debug(`destination file ${destPath}`); + core18.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -83177,12 +83056,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path15.join(_getCacheDirectory(), toolName, versionSpec, arch2); - core17.debug(`checking cache: ${cachePath}`); + core18.debug(`checking cache: ${cachePath}`); if (fs17.existsSync(cachePath) && fs17.existsSync(`${cachePath}.complete`)) { - core17.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core18.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core17.debug("not found"); + core18.debug("not found"); } } return toolPath; @@ -83213,7 +83092,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core17.debug("set auth"); + core18.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -83234,7 +83113,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core17.debug("Invalid json"); + core18.debug("Invalid json"); } } return releases; @@ -83260,7 +83139,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path15.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); - core17.debug(`destination ${folderPath}`); + core18.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); yield io7.rmRF(markerPath); @@ -83272,19 +83151,19 @@ var require_tool_cache = __commonJS({ const folderPath = path15.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs17.writeFileSync(markerPath, ""); - core17.debug("finished caching tool"); + core18.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - core17.debug(`isExplicit: ${c}`); 
+ core18.debug(`isExplicit: ${c}`); const valid3 = semver8.valid(c) != null; - core17.debug(`explicit? ${valid3}`); + core18.debug(`explicit? ${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core17.debug(`evaluating ${versions.length} versions`); + core18.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver8.gt(a, b)) { return 1; @@ -83300,9 +83179,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core17.debug(`matched: ${version}`); + core18.debug(`matched: ${version}`); } else { - core17.debug("match not found"); + core18.debug("match not found"); } return version; } @@ -85976,14 +85855,14 @@ var require_retention = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getExpiration = void 0; var generated_1 = require_generated(); - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); function getExpiration(retentionDays) { if (!retentionDays) { return void 0; } const maxRetentionDays = getRetentionDays(); if (maxRetentionDays && maxRetentionDays < retentionDays) { - core17.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. Using ${maxRetentionDays} instead.`); + core18.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. Using ${maxRetentionDays} instead.`); retentionDays = maxRetentionDays; } const expirationDate = /* @__PURE__ */ new Date(); @@ -86575,7 +86454,7 @@ var require_util11 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getBackendIdsFromToken = void 0; - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var config_1 = require_config2(); var jwt_decode_1 = __importDefault4(require_jwt_decode_cjs()); var InvalidJwtError = new Error("Failed to get backend IDs: The provided JWT token is invalid and/or missing claims"); @@ -86601,8 +86480,8 @@ var require_util11 = __commonJS({ workflowRunBackendId: scopeParts[1], workflowJobRunBackendId: scopeParts[2] }; - core17.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`); - core17.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`); + core18.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`); + core18.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`); return ids; } throw InvalidJwtError; @@ -86673,7 +86552,7 @@ var require_blob_upload = __commonJS({ exports2.uploadZipToBlobStorage = void 0; var storage_blob_1 = require_dist7(); var config_1 = require_config2(); - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var crypto = __importStar4(require("crypto")); var stream2 = __importStar4(require("stream")); var errors_1 = require_errors3(); @@ -86699,9 +86578,9 @@ var require_blob_upload = __commonJS({ const bufferSize = (0, config_1.getUploadChunkSize)(); const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL); const blockBlobClient = blobClient.getBlockBlobClient(); - core17.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`); + core18.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`); const uploadCallback = (progress) => { - core17.info(`Uploaded bytes ${progress.loadedBytes}`); + 
core18.info(`Uploaded bytes ${progress.loadedBytes}`); uploadByteCount = progress.loadedBytes; lastProgressTime = Date.now(); }; @@ -86715,7 +86594,7 @@ var require_blob_upload = __commonJS({ const hashStream = crypto.createHash("sha256"); zipUploadStream.pipe(uploadStream); zipUploadStream.pipe(hashStream).setEncoding("hex"); - core17.info("Beginning upload of artifact content to blob storage"); + core18.info("Beginning upload of artifact content to blob storage"); try { yield Promise.race([ blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options), @@ -86729,12 +86608,12 @@ var require_blob_upload = __commonJS({ } finally { abortController.abort(); } - core17.info("Finished uploading artifact content to blob storage!"); + core18.info("Finished uploading artifact content to blob storage!"); hashStream.end(); sha256Hash = hashStream.read(); - core17.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`); + core18.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`); if (uploadByteCount === 0) { - core17.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`); + core18.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`); } return { uploadSize: uploadByteCount, @@ -89790,7 +89669,7 @@ var require_BufferList = __commonJS({ this.head = this.tail = null; this.length = 0; }; - BufferList.prototype.join = function join13(s) { + BufferList.prototype.join = function join14(s) { if (this.length === 0) return ""; var p = this.head; var ret = "" + p.data; @@ -110793,7 +110672,7 @@ var require_zip2 = __commonJS({ var stream2 = __importStar4(require("stream")); var promises_1 = require("fs/promises"); var archiver2 = __importStar4(require_archiver()); - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var config_1 = require_config2(); exports2.DEFAULT_COMPRESSION_LEVEL = 6; var ZipUploadStream = class extends stream2.Transform { @@ -110810,7 +110689,7 @@ var require_zip2 = __commonJS({ exports2.ZipUploadStream = ZipUploadStream; function createZipUploadStream(uploadSpecification, compressionLevel = exports2.DEFAULT_COMPRESSION_LEVEL) { return __awaiter4(this, void 0, void 0, function* () { - core17.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`); + core18.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`); const zip = archiver2.create("zip", { highWaterMark: (0, config_1.getUploadChunkSize)(), zlib: { level: compressionLevel } @@ -110834,8 +110713,8 @@ var require_zip2 = __commonJS({ } const bufferSize = (0, config_1.getUploadChunkSize)(); const zipUploadStream = new ZipUploadStream(bufferSize); - core17.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`); - core17.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`); + core18.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`); + core18.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`); zip.pipe(zipUploadStream); zip.finalize(); return zipUploadStream; @@ -110843,24 +110722,24 @@ var require_zip2 = __commonJS({ } exports2.createZipUploadStream = createZipUploadStream; var zipErrorCallback = (error2) => { - core17.error("An error has occurred while creating the zip file for upload"); - core17.info(error2); + core18.error("An error has occurred while creating the zip file for upload"); + core18.info(error2); throw new Error("An error has occurred during 
zip creation for the artifact"); }; var zipWarningCallback = (error2) => { if (error2.code === "ENOENT") { - core17.warning("ENOENT warning during artifact zip creation. No such file or directory"); - core17.info(error2); + core18.warning("ENOENT warning during artifact zip creation. No such file or directory"); + core18.info(error2); } else { - core17.warning(`A non-blocking warning has occurred during artifact zip creation: ${error2.code}`); - core17.info(error2); + core18.warning(`A non-blocking warning has occurred during artifact zip creation: ${error2.code}`); + core18.info(error2); } }; var zipFinishCallback = () => { - core17.debug("Zip stream for upload has finished."); + core18.debug("Zip stream for upload has finished."); }; var zipEndCallback = () => { - core17.debug("Zip stream for upload has ended."); + core18.debug("Zip stream for upload has ended."); }; } }); @@ -110925,7 +110804,7 @@ var require_upload_artifact = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadArtifact = void 0; - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var retention_1 = require_retention(); var path_and_artifact_name_validation_1 = require_path_and_artifact_name_validation(); var artifact_twirp_client_1 = require_artifact_twirp_client2(); @@ -110972,13 +110851,13 @@ var require_upload_artifact = __commonJS({ value: `sha256:${uploadResult.sha256Hash}` }); } - core17.info(`Finalizing artifact upload`); + core18.info(`Finalizing artifact upload`); const finalizeArtifactResp = yield artifactClient.FinalizeArtifact(finalizeArtifactReq); if (!finalizeArtifactResp.ok) { throw new errors_1.InvalidResponseError("FinalizeArtifact: response from backend was not ok"); } const artifactId = BigInt(finalizeArtifactResp.artifactId); - core17.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`); + core18.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`); return { size: uploadResult.uploadSize, digest: uploadResult.sha256Hash, @@ -118126,7 +118005,7 @@ var require_download_artifact = __commonJS({ var crypto = __importStar4(require("crypto")); var stream2 = __importStar4(require("stream")); var github3 = __importStar4(require_github2()); - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var httpClient = __importStar4(require_lib()); var unzip_stream_1 = __importDefault4(require_unzip()); var user_agent_1 = require_user_agent2(); @@ -118162,7 +118041,7 @@ var require_download_artifact = __commonJS({ return yield streamExtractExternal(url, directory); } catch (error2) { retryCount++; - core17.debug(`Failed to download artifact after ${retryCount} retries due to ${error2.message}. Retrying in 5 seconds...`); + core18.debug(`Failed to download artifact after ${retryCount} retries due to ${error2.message}. 
Retrying in 5 seconds...`); yield new Promise((resolve8) => setTimeout(resolve8, 5e3)); } } @@ -118191,7 +118070,7 @@ var require_download_artifact = __commonJS({ extractStream.on("data", () => { timer.refresh(); }).on("error", (error2) => { - core17.debug(`response.message: Artifact download failed: ${error2.message}`); + core18.debug(`response.message: Artifact download failed: ${error2.message}`); clearTimeout(timer); reject(error2); }).pipe(unzip_stream_1.default.Extract({ path: directory })).on("close", () => { @@ -118199,7 +118078,7 @@ var require_download_artifact = __commonJS({ if (hashStream) { hashStream.end(); sha256Digest = hashStream.read(); - core17.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`); + core18.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`); } resolve8({ sha256Digest: `sha256:${sha256Digest}` }); }).on("error", (error2) => { @@ -118214,7 +118093,7 @@ var require_download_artifact = __commonJS({ const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path); const api = github3.getOctokit(token); let digestMismatch = false; - core17.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`); + core18.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`); const { headers, status } = yield api.rest.actions.downloadArtifact({ owner: repositoryOwner, repo: repositoryName, @@ -118231,16 +118110,16 @@ var require_download_artifact = __commonJS({ if (!location) { throw new Error(`Unable to redirect to artifact download url`); } - core17.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`); + core18.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`); try { - core17.info(`Starting download of artifact to: ${downloadPath}`); + core18.info(`Starting download of artifact to: ${downloadPath}`); const extractResponse = yield streamExtract(location, downloadPath); - core17.info(`Artifact download completed successfully.`); + core18.info(`Artifact download completed successfully.`); if (options === null || options === void 0 ? void 0 : options.expectedHash) { if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) { digestMismatch = true; - core17.debug(`Computed digest: ${extractResponse.sha256Digest}`); - core17.debug(`Expected digest: ${options.expectedHash}`); + core18.debug(`Computed digest: ${extractResponse.sha256Digest}`); + core18.debug(`Expected digest: ${options.expectedHash}`); } } } catch (error2) { @@ -118267,7 +118146,7 @@ var require_download_artifact = __commonJS({ Are you trying to download from a different run? Try specifying a github-token with \`actions:read\` scope.`); } if (artifacts.length > 1) { - core17.warning("Multiple artifacts found, defaulting to first."); + core18.warning("Multiple artifacts found, defaulting to first."); } const signedReq = { workflowRunBackendId: artifacts[0].workflowRunBackendId, @@ -118275,16 +118154,16 @@ Are you trying to download from a different run? 
Try specifying a github-token w name: artifacts[0].name }; const { signedUrl } = yield artifactClient.GetSignedArtifactURL(signedReq); - core17.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`); + core18.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`); try { - core17.info(`Starting download of artifact to: ${downloadPath}`); + core18.info(`Starting download of artifact to: ${downloadPath}`); const extractResponse = yield streamExtract(signedUrl, downloadPath); - core17.info(`Artifact download completed successfully.`); + core18.info(`Artifact download completed successfully.`); if (options === null || options === void 0 ? void 0 : options.expectedHash) { if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) { digestMismatch = true; - core17.debug(`Computed digest: ${extractResponse.sha256Digest}`); - core17.debug(`Expected digest: ${options.expectedHash}`); + core18.debug(`Computed digest: ${extractResponse.sha256Digest}`); + core18.debug(`Expected digest: ${options.expectedHash}`); } } } catch (error2) { @@ -118297,10 +118176,10 @@ Are you trying to download from a different run? Try specifying a github-token w function resolveOrCreateDirectory(downloadPath = (0, config_1.getGitHubWorkspaceDir)()) { return __awaiter4(this, void 0, void 0, function* () { if (!(yield exists(downloadPath))) { - core17.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`); + core18.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`); yield promises_1.default.mkdir(downloadPath, { recursive: true }); } else { - core17.debug(`Artifact destination folder already exists: ${downloadPath}`); + core18.debug(`Artifact destination folder already exists: ${downloadPath}`); } return downloadPath; }); @@ -118341,7 +118220,7 @@ var require_retry_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRetryOptions = void 0; - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var defaultMaxRetryNumber = 5; var defaultExemptStatusCodes = [400, 401, 403, 404, 422]; function getRetryOptions(defaultOptions, retries = defaultMaxRetryNumber, exemptStatusCodes = defaultExemptStatusCodes) { @@ -118356,7 +118235,7 @@ var require_retry_options = __commonJS({ retryOptions.doNotRetry = exemptStatusCodes; } const requestOptions = Object.assign(Object.assign({}, defaultOptions.request), { retries }); - core17.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? _a : "octokit default: [400, 401, 403, 404, 422]"})`); + core18.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? 
_a : "octokit default: [400, 401, 403, 404, 422]"})`); return [retryOptions, requestOptions]; } exports2.getRetryOptions = getRetryOptions; @@ -118513,7 +118392,7 @@ var require_get_artifact = __commonJS({ exports2.getArtifactInternal = exports2.getArtifactPublic = void 0; var github_1 = require_github2(); var plugin_retry_1 = require_dist_node25(); - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var utils_1 = require_utils13(); var retry_options_1 = require_retry_options(); var plugin_request_log_1 = require_dist_node24(); @@ -118551,7 +118430,7 @@ var require_get_artifact = __commonJS({ let artifact2 = getArtifactResp.data.artifacts[0]; if (getArtifactResp.data.artifacts.length > 1) { artifact2 = getArtifactResp.data.artifacts.sort((a, b) => b.id - a.id)[0]; - core17.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`); + core18.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`); } return { artifact: { @@ -118584,7 +118463,7 @@ var require_get_artifact = __commonJS({ let artifact2 = res.artifacts[0]; if (res.artifacts.length > 1) { artifact2 = res.artifacts.sort((a, b) => Number(b.databaseId) - Number(a.databaseId))[0]; - core17.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.databaseId})`); + core18.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.databaseId})`); } return { artifact: { @@ -120532,7 +120411,7 @@ var require_requestUtils2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientRequest = exports2.retry = void 0; var utils_1 = require_utils14(); - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var config_variables_1 = require_config_variables(); function retry3(name, operation, customErrorMessages, maxAttempts) { return __awaiter4(this, void 0, void 0, function* () { @@ -120559,13 +120438,13 @@ var require_requestUtils2 = __commonJS({ errorMessage = error2.message; } if (!isRetryable) { - core17.info(`${name} - Error is not retryable`); + core18.info(`${name} - Error is not retryable`); if (response) { (0, utils_1.displayHttpDiagnostics)(response); } break; } - core17.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core18.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); yield (0, utils_1.sleep)((0, utils_1.getExponentialRetryTimeInMilliseconds)(attempt)); attempt++; } @@ -120649,7 +120528,7 @@ var require_upload_http_client = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadHttpClient = void 0; var fs17 = __importStar4(require("fs")); - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var tmp = __importStar4(require_tmp_promise()); var stream2 = __importStar4(require("stream")); var utils_1 = require_utils14(); @@ -120714,7 +120593,7 @@ var require_upload_http_client = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const FILE_CONCURRENCY = (0, config_variables_1.getUploadFileConcurrency)(); const MAX_CHUNK_SIZE = (0, config_variables_1.getUploadChunkSize)(); - core17.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); + core18.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); const parameters = []; let continueOnError = true; 
if (options) { @@ -120751,15 +120630,15 @@ var require_upload_http_client = __commonJS({ } const startTime = perf_hooks_1.performance.now(); const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters); - if (core17.isDebug()) { - core17.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); + if (core18.isDebug()) { + core18.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); } uploadFileSize += uploadFileResult.successfulUploadSize; totalFileSize += uploadFileResult.totalSize; if (uploadFileResult.isSuccess === false) { failedItemsToReport.push(currentFileParameters.file); if (!continueOnError) { - core17.error(`aborting artifact upload`); + core18.error(`aborting artifact upload`); abortPendingFileUploads = true; } } @@ -120768,7 +120647,7 @@ var require_upload_http_client = __commonJS({ }))); this.statusReporter.stop(); this.uploadHttpManager.disposeAndReplaceAllClients(); - core17.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); + core18.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); return { uploadSize: uploadFileSize, totalSize: totalFileSize, @@ -120794,16 +120673,16 @@ var require_upload_http_client = __commonJS({ let uploadFileSize = 0; let isGzip = true; if (!isFIFO && totalFileSize < 65536) { - core17.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); + core18.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); const buffer = yield (0, upload_gzip_1.createGZipFileInBuffer)(parameters.file); let openUploadStream; if (totalFileSize < buffer.byteLength) { - core17.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + core18.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); openUploadStream = () => fs17.createReadStream(parameters.file); isGzip = false; uploadFileSize = totalFileSize; } else { - core17.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); + core18.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); openUploadStream = () => { const passThrough = new stream2.PassThrough(); passThrough.end(buffer); @@ -120815,7 +120694,7 @@ var require_upload_http_client = __commonJS({ if (!result) { isUploadSuccessful = false; failedChunkSizes += uploadFileSize; - core17.warning(`Aborting upload for ${parameters.file} due to failure`); + core18.warning(`Aborting upload for ${parameters.file} due to failure`); } return { isSuccess: isUploadSuccessful, @@ -120824,16 +120703,16 @@ var require_upload_http_client = __commonJS({ }; } else { const tempFile = yield tmp.file(); - core17.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); + core18.debug(`${parameters.file} is greater than 64k in size. 
Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); uploadFileSize = yield (0, upload_gzip_1.createGZipFileOnDisk)(parameters.file, tempFile.path); let uploadFilePath = tempFile.path; if (!isFIFO && totalFileSize < uploadFileSize) { - core17.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + core18.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); uploadFileSize = totalFileSize; uploadFilePath = parameters.file; isGzip = false; } else { - core17.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); + core18.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); } let abortFileUpload = false; while (offset < uploadFileSize) { @@ -120853,7 +120732,7 @@ var require_upload_http_client = __commonJS({ if (!result) { isUploadSuccessful = false; failedChunkSizes += chunkSize; - core17.warning(`Aborting upload for ${parameters.file} due to failure`); + core18.warning(`Aborting upload for ${parameters.file} due to failure`); abortFileUpload = true; } else { if (uploadFileSize > 8388608) { @@ -120861,7 +120740,7 @@ var require_upload_http_client = __commonJS({ } } } - core17.debug(`deleting temporary gzip file ${tempFile.path}`); + core18.debug(`deleting temporary gzip file ${tempFile.path}`); yield tempFile.cleanup(); return { isSuccess: isUploadSuccessful, @@ -120900,7 +120779,7 @@ var require_upload_http_client = __commonJS({ if (response) { (0, utils_1.displayHttpDiagnostics)(response); } - core17.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); + core18.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); return true; } return false; @@ -120908,14 +120787,14 @@ var require_upload_http_client = __commonJS({ const backOff = (retryAfterValue) => __awaiter4(this, void 0, void 0, function* () { this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core17.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); + core18.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core17.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); + core18.info(`Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); yield (0, utils_1.sleep)(backoffTime); } - core17.info(`Finished backoff for retry #${retryCount}, continuing with upload`); + core18.info(`Finished backoff for retry #${retryCount}, continuing with upload`); return; }); while (retryCount <= retryLimit) { @@ -120923,7 +120802,7 @@ var require_upload_http_client = __commonJS({ try { response = yield uploadChunkRequest(); } catch (error2) { - core17.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); + core18.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); console.log(error2); if (incrementAndCheckRetryLimit()) { return false; @@ -120935,13 +120814,13 @@ var require_upload_http_client = __commonJS({ if ((0, utils_1.isSuccessStatusCode)(response.message.statusCode)) { return true; } else if ((0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core17.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); + core18.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); if (incrementAndCheckRetryLimit(response)) { return false; } (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { - core17.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); + core18.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); (0, utils_1.displayHttpDiagnostics)(response); return false; } @@ -120959,7 +120838,7 @@ var require_upload_http_client = __commonJS({ resourceUrl.searchParams.append("artifactName", artifactName); const parameters = { Size: size }; const data = JSON.stringify(parameters, null, 2); - core17.debug(`URL is ${resourceUrl.toString()}`); + core18.debug(`URL is ${resourceUrl.toString()}`); const client = this.uploadHttpManager.getClient(0); const headers = (0, utils_1.getUploadHeaders)("application/json", false); const customErrorMessages = /* @__PURE__ */ new Map([ @@ -120972,7 +120851,7 @@ var require_upload_http_client = __commonJS({ return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages); yield response.readBody(); - core17.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); + core18.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); }); } }; @@ -121041,7 +120920,7 @@ var require_download_http_client = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DownloadHttpClient = void 0; var fs17 = __importStar4(require("fs")); - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var zlib3 = __importStar4(require("zlib")); var utils_1 = require_utils14(); var url_1 = require("url"); @@ -121095,11 +120974,11 @@ var require_download_http_client = __commonJS({ downloadSingleArtifact(downloadItems) { return __awaiter4(this, void 0, void 0, function* () { const DOWNLOAD_CONCURRENCY = (0, config_variables_1.getDownloadFileConcurrency)(); - core17.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); + core18.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]; let currentFile = 0; let downloadedFiles = 0; - 
core17.info(`Total number of files that will be downloaded: ${downloadItems.length}`); + core18.info(`Total number of files that will be downloaded: ${downloadItems.length}`); this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length); this.statusReporter.start(); yield Promise.all(parallelDownloads.map((index) => __awaiter4(this, void 0, void 0, function* () { @@ -121108,8 +120987,8 @@ var require_download_http_client = __commonJS({ currentFile += 1; const startTime = perf_hooks_1.performance.now(); yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath); - if (core17.isDebug()) { - core17.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); + if (core18.isDebug()) { + core18.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); } this.statusReporter.incrementProcessedCount(); } @@ -121147,19 +121026,19 @@ var require_download_http_client = __commonJS({ } else { this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core17.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); + core18.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core17.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); + core18.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); yield (0, utils_1.sleep)(backoffTime); } - core17.info(`Finished backoff for retry #${retryCount}, continuing with download`); + core18.info(`Finished backoff for retry #${retryCount}, continuing with download`); } }); const isAllBytesReceived = (expected, received) => { if (!expected || !received || process.env["ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION"]) { - core17.info("Skipping download validation."); + core18.info("Skipping download validation."); return true; } return parseInt(expected) === received; @@ -121180,7 +121059,7 @@ var require_download_http_client = __commonJS({ try { response = yield makeDownloadRequest(); } catch (error2) { - core17.info("An error occurred while attempting to download a file"); + core18.info("An error occurred while attempting to download a file"); console.log(error2); yield backOff(); continue; @@ -121200,7 +121079,7 @@ var require_download_http_client = __commonJS({ } } if (forceRetry || (0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core17.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); + core18.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); resetDestinationStream(downloadPath); (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? 
yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { @@ -121222,29 +121101,29 @@ var require_download_http_client = __commonJS({ if (isGzip) { const gunzip = zlib3.createGunzip(); response.message.on("error", (error2) => { - core17.info(`An error occurred while attempting to read the response stream`); + core18.info(`An error occurred while attempting to read the response stream`); gunzip.close(); destinationStream.close(); reject(error2); }).pipe(gunzip).on("error", (error2) => { - core17.info(`An error occurred while attempting to decompress the response stream`); + core18.info(`An error occurred while attempting to decompress the response stream`); destinationStream.close(); reject(error2); }).pipe(destinationStream).on("close", () => { resolve8(); }).on("error", (error2) => { - core17.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core18.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error2); }); } else { response.message.on("error", (error2) => { - core17.info(`An error occurred while attempting to read the response stream`); + core18.info(`An error occurred while attempting to read the response stream`); destinationStream.close(); reject(error2); }).pipe(destinationStream).on("close", () => { resolve8(); }).on("error", (error2) => { - core17.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core18.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error2); }); } @@ -121383,7 +121262,7 @@ var require_artifact_client = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultArtifactClient = void 0; - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var upload_specification_1 = require_upload_specification(); var upload_http_client_1 = require_upload_http_client(); var utils_1 = require_utils14(); @@ -121404,7 +121283,7 @@ var require_artifact_client = __commonJS({ */ uploadArtifact(name, files, rootDirectory, options) { return __awaiter4(this, void 0, void 0, function* () { - core17.info(`Starting artifact upload + core18.info(`Starting artifact upload For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`); (0, path_and_artifact_name_validation_1.checkArtifactName)(name); const uploadSpecification = (0, upload_specification_1.getUploadSpecification)(name, rootDirectory, files); @@ -121416,24 +121295,24 @@ For more detailed logs during the artifact upload process, enable step-debugging }; const uploadHttpClient = new upload_http_client_1.UploadHttpClient(); if (uploadSpecification.length === 0) { - core17.warning(`No files found that can be uploaded`); + core18.warning(`No files found that can be uploaded`); } else { const response = yield uploadHttpClient.createArtifactInFileContainer(name, options); if (!response.fileContainerResourceUrl) { - core17.debug(response.toString()); + core18.debug(response.toString()); throw new Error("No URL provided by the Artifact Service to upload an artifact to"); } - core17.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); - core17.info(`Container for artifact "${name}" successfully created. 
Starting upload of file(s)`); + core18.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); + core18.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`); const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options); - core17.info(`File upload process has finished. Finalizing the artifact upload`); + core18.info(`File upload process has finished. Finalizing the artifact upload`); yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name); if (uploadResult.failedItems.length > 0) { - core17.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); + core18.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); } else { - core17.info(`Artifact has been finalized. All files have been successfully uploaded!`); + core18.info(`Artifact has been finalized. All files have been successfully uploaded!`); } - core17.info(` + core18.info(` The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage @@ -121467,10 +121346,10 @@ Note: The size of downloaded zips can differ significantly from the reported siz path15 = (0, path_1.resolve)(path15); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path15, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); if (downloadSpecification.filesToDownload.length === 0) { - core17.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); + core18.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); - core17.info("Directory structure has been set up for the artifact"); + core18.info("Directory structure has been set up for the artifact"); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); } @@ -121486,7 +121365,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz const response = []; const artifacts = yield downloadHttpClient.listArtifacts(); if (artifacts.count === 0) { - core17.info("Unable to find any artifacts for the associated workflow"); + core18.info("Unable to find any artifacts for the associated workflow"); return response; } if (!path15) { @@ -121498,11 +121377,11 @@ Note: The size of downloaded zips can differ significantly from the reported siz while (downloadedArtifacts < artifacts.count) { const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; downloadedArtifacts += 1; - core17.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); + core18.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path15, 
true); if (downloadSpecification.filesToDownload.length === 0) { - core17.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); + core18.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); @@ -121568,7 +121447,7 @@ var require_internal_glob_options_helper2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -121580,23 +121459,23 @@ var require_internal_glob_options_helper2 = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core17.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core18.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core17.debug(`implicitDescendants '${result.implicitDescendants}'`); + core18.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = copy.matchDirectories; - core17.debug(`matchDirectories '${result.matchDirectories}'`); + core18.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core17.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core18.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core17.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core18.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -122280,7 +122159,7 @@ var require_internal_globber2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var fs17 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper2()); var path15 = __importStar4(require("path")); @@ -122333,7 +122212,7 @@ var require_internal_globber2 = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core17.debug(`Search path '${searchPath}'`); + core18.debug(`Search path '${searchPath}'`); try { yield __await4(fs17.promises.lstat(searchPath)); } catch (err) { @@ -122408,7 +122287,7 @@ var require_internal_globber2 = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core17.debug(`Broken symlink '${item.path}'`); + core18.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -122424,7 +122303,7 @@ var require_internal_globber2 = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core17.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core18.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -122517,7 +122396,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = void 0; var crypto = __importStar4(require("crypto")); - var core17 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var fs17 = __importStar4(require("fs")); var stream2 = __importStar4(require("stream")); var util = __importStar4(require("util")); @@ -122526,7 +122405,7 @@ var require_internal_hash_files = __commonJS({ var _a, e_1, _b, _c; var _d; return __awaiter4(this, void 0, void 0, function* () { - const writeDelegate = verbose ? core17.info : core17.debug; + const writeDelegate = verbose ? core18.info : core18.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd(); const result = crypto.createHash("sha256"); @@ -126852,12 +126731,12 @@ var require_sarif_schema_2_1_0 = __commonJS({ }); // src/init-action-post.ts -var core16 = __toESM(require_core()); +var core17 = __toESM(require_core()); // src/actions-util.ts var fs2 = __toESM(require("fs")); var path2 = __toESM(require("path")); -var core3 = __toESM(require_core()); +var core4 = __toESM(require_core()); var toolrunner = __toESM(require_toolrunner()); var github = __toESM(require_github()); var io2 = __toESM(require_io()); @@ -126865,7 +126744,7 @@ var io2 = __toESM(require_io()); // src/util.ts var fs = __toESM(require("fs")); var path = __toESM(require("path")); -var core2 = __toESM(require_core()); +var core3 = __toESM(require_core()); var exec = __toESM(require_exec()); var io = __toESM(require_io()); @@ -127006,7 +126885,61 @@ function checkDiskSpace(directoryPath, dependencies = { // src/util.ts var import_del = __toESM(require_del()); -var import_get_folder_size = __toESM(require_get_folder_size()); + +// node_modules/get-folder-size/index.js +var import_node_path2 = require("node:path"); +async function getFolderSize(itemPath, options) { + return await core(itemPath, options, { errors: true }); +} +getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); +getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); +async function core(rootItemPath, options = {}, returnType = {}) { + const fs17 = options.fs || await import("node:fs/promises"); + let folderSize = 0n; + const foundInos = /* @__PURE__ */ new Set(); + const errors = []; + await processItem(rootItemPath); + async function processItem(itemPath) { + if (options.ignore?.test(itemPath)) return; + const stats = returnType.strict ? await fs17.lstat(itemPath, { bigint: true }) : await fs17.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); + if (typeof stats !== "object") return; + if (!foundInos.has(stats.ino)) { + foundInos.add(stats.ino); + folderSize += stats.size; + } + if (stats.isDirectory()) { + const directoryItems = returnType.strict ? 
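// In strict mode the readdir below (like the lstat above) is awaited without a
// .catch, so the first filesystem error rejects the whole walk; in the default
// and loose modes the error is pushed onto `errors`, the .catch returns a
// non-object, and the `typeof` guard skips the entry instead.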
await fs17.readdir(itemPath) : await fs17.readdir(itemPath).catch((error2) => errors.push(error2)); + if (typeof directoryItems !== "object") return; + await Promise.all( + directoryItems.map( + (directoryItem) => processItem((0, import_node_path2.join)(itemPath, directoryItem)) + ) + ); + } + } + if (!options.bigint) { + if (folderSize > BigInt(Number.MAX_SAFE_INTEGER)) { + const error2 = new RangeError( + "The folder size is too large to return as a Number. You can instruct this package to return a BigInt instead." + ); + if (returnType.strict) { + throw error2; + } + errors.push(error2); + folderSize = Number.MAX_SAFE_INTEGER; + } else { + folderSize = Number(folderSize); + } + } + if (returnType.errors) { + return { + size: folderSize, + errors: errors.length > 0 ? errors : null + }; + } else { + return folderSize; + } +} // node_modules/js-yaml/dist/js-yaml.mjs function isNothing(subject) { @@ -127577,7 +127510,7 @@ var json = failsafe.extend({ float ] }); -var core = json; +var core2 = json; var YAML_DATE_REGEXP = new RegExp( "^([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])$" ); @@ -127791,7 +127724,7 @@ var set = new type("tag:yaml.org,2002:set", { resolve: resolveYamlSet, construct: constructYamlSet }); -var _default = core.extend({ +var _default = core2.extend({ implicit: [ timestamp, merge @@ -129681,7 +129614,7 @@ function checkGitHubVersionInRange(version, logger) { ); } hasBeenWarnedAboutVersion = true; - core2.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true); + core3.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true); } function apiVersionInRange(version, minimumVersion2, maximumVersion2) { if (!semver.satisfies(version, `>=${minimumVersion2}`)) { @@ -129812,7 +129745,7 @@ async function checkDiskUsage(logger) { } else { logger.debug(message); } - core2.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true"); + core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true"); } return { numAvailableBytes: diskUsage.free, @@ -129846,13 +129779,13 @@ async function checkSipEnablement(logger) { if (sipStatusOutput.stdout.includes( "System Integrity Protection status: enabled." )) { - core2.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true"); + core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true"); return true; } if (sipStatusOutput.stdout.includes( "System Integrity Protection status: disabled." )) { - core2.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false"); + core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false"); return false; } } @@ -129899,14 +129832,14 @@ async function asyncSome(array, predicate) { // src/actions-util.ts var pkg = require_package(); var getRequiredInput = function(name) { - const value = core3.getInput(name); + const value = core4.getInput(name); if (!value) { throw new ConfigurationError(`Input required and not supplied: ${name}`); } return value; }; var getOptionalInput = function(name) { - const value = core3.getInput(name); + const value = core4.getInput(name); return value.length > 0 ? 
value : void 0; }; function getTemporaryDirectory() { @@ -129943,22 +129876,22 @@ async function printDebugLogs(config) { const databaseDirectory = getCodeQLDatabasePath(config, language); const logsDirectory = path2.join(databaseDirectory, "log"); if (!doesDirectoryExist(logsDirectory)) { - core3.info(`Directory ${logsDirectory} does not exist.`); + core4.info(`Directory ${logsDirectory} does not exist.`); continue; } const walkLogFiles = (dir) => { const entries = fs2.readdirSync(dir, { withFileTypes: true }); if (entries.length === 0) { - core3.info(`No debug logs found at directory ${logsDirectory}.`); + core4.info(`No debug logs found at directory ${logsDirectory}.`); } for (const entry of entries) { if (entry.isFile()) { const absolutePath = path2.resolve(dir, entry.name); - core3.startGroup( + core4.startGroup( `CodeQL Debug Logs - ${language} - ${entry.name} from file at path ${absolutePath}` ); process.stdout.write(fs2.readFileSync(absolutePath)); - core3.endGroup(); + core4.endGroup(); } else if (entry.isDirectory()) { walkLogFiles(path2.resolve(dir, entry.name)); } @@ -129979,7 +129912,7 @@ function getUploadValue(input) { case "never": return "never"; default: - core3.warning( + core4.warning( `Unrecognized 'upload' input to 'analyze' Action: ${input}. Defaulting to 'always'.` ); return "always"; @@ -130076,7 +130009,7 @@ async function runTool(cmd, args = [], opts = {}) { } var persistedInputsKey = "persisted_inputs"; var restoreInputs = function() { - const persistedInputs = core3.getState(persistedInputsKey); + const persistedInputs = core4.getState(persistedInputsKey); if (persistedInputs) { for (const [name, value] of JSON.parse(persistedInputs)) { process.env[name] = value; @@ -130085,7 +130018,7 @@ var restoreInputs = function() { }; // src/api-client.ts -var core4 = __toESM(require_core()); +var core5 = __toESM(require_core()); var githubUtils = __toESM(require_utils4()); var retry = __toESM(require_dist_node15()); var import_console_log_level = __toESM(require_console_log_level()); @@ -130190,7 +130123,7 @@ async function getAnalysisKey() { const workflowPath = await getWorkflowRelativePath(); const jobName = getRequiredEnvParam("GITHUB_JOB"); analysisKey = `${workflowPath}:${jobName}`; - core4.exportVariable(analysisKeyEnvVar, analysisKey); + core5.exportVariable(analysisKeyEnvVar, analysisKey); return analysisKey; } function computeAutomationID(analysis_key, environment) { @@ -130223,7 +130156,7 @@ function wrapApiConfigurationError(e) { // src/codeql.ts var fs10 = __toESM(require("fs")); var path9 = __toESM(require("path")); -var core9 = __toESM(require_core()); +var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); // src/cli-errors.ts @@ -130477,7 +130410,7 @@ var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { var supportedAnalysisKinds = new Set(Object.values(AnalysisKind)); // src/caching-utils.ts -var core5 = __toESM(require_core()); +var core6 = __toESM(require_core()); // src/diff-informed-analysis-utils.ts var fs5 = __toESM(require("fs")); @@ -130498,13 +130431,13 @@ var path3 = __toESM(require("path")); var actionsCache = __toESM(require_cache3()); // src/git-utils.ts -var core6 = __toESM(require_core()); +var core7 = __toESM(require_core()); var toolrunner2 = __toESM(require_toolrunner()); var io3 = __toESM(require_io()); var runGitCommand = async function(workingDirectory, args, customErrorMessage) { let stdout = ""; let stderr = ""; - core6.debug(`Running git command: git ${args.join(" ")}`); + core7.debug(`Running 
git command: git ${args.join(" ")}`); try { await new toolrunner2.ToolRunner(await io3.which("git", true), args, { silent: true, @@ -130524,7 +130457,7 @@ var runGitCommand = async function(workingDirectory, args, customErrorMessage) { if (stderr.includes("not a git repository")) { reason = "The checkout path provided to the action does not appear to be a git repository."; } - core6.info(`git call failed. ${customErrorMessage} Error: ${reason}`); + core7.info(`git call failed. ${customErrorMessage} Error: ${reason}`); throw error2; } }; @@ -130669,7 +130602,7 @@ async function getRef() { ) !== head; if (hasChangedRef) { const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head"); - core6.debug( + core7.debug( `No longer on merge commit, rewriting ref from ${ref} to ${newRef}.` ); return newRef; @@ -130695,16 +130628,16 @@ async function isAnalyzingDefaultBranch() { } // src/logging.ts -var core7 = __toESM(require_core()); +var core8 = __toESM(require_core()); function getActionsLogger() { - return core7; + return core8; } function withGroup(groupName, f) { - core7.startGroup(groupName); + core8.startGroup(groupName); try { return f(); } finally { - core7.endGroup(); + core8.endGroup(); } } function formatDuration(durationMs) { @@ -131560,7 +131493,7 @@ var fs8 = __toESM(require("fs")); var os = __toESM(require("os")); var path7 = __toESM(require("path")); var import_perf_hooks = require("perf_hooks"); -var core8 = __toESM(require_core()); +var core9 = __toESM(require_core()); var import_http_client = __toESM(require_lib()); var toolcache2 = __toESM(require_tool_cache()); var import_follow_redirects = __toESM(require_follow_redirects()); @@ -131614,10 +131547,10 @@ async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorizat }; } } catch (e) { - core8.warning( + core9.warning( `Failed to download and extract CodeQL bundle using streaming with error: ${getErrorMessage(e)}` ); - core8.warning(`Falling back to downloading the bundle before extracting.`); + core9.warning(`Falling back to downloading the bundle before extracting.`); await cleanUpGlob(dest, "CodeQL bundle", logger); } const toolsDownloadStart = import_perf_hooks.performance.now(); @@ -132665,12 +132598,12 @@ ${output}` ); } else if (checkVersion && process.env["CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */] !== "true" && !await codeQlVersionAtLeast(codeql, CODEQL_NEXT_MINIMUM_VERSION)) { const result = await codeql.getVersion(); - core9.warning( + core10.warning( `CodeQL CLI version ${result.version} was discontinued on ${GHES_MOST_RECENT_DEPRECATION_DATE} alongside GitHub Enterprise Server ${GHES_VERSION_MOST_RECENTLY_DEPRECATED} and will not be supported by the next minor release of the CodeQL Action. Please update to CodeQL CLI version ${CODEQL_NEXT_MINIMUM_VERSION} or later. For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version. 
Alternatively, if you want to continue using CodeQL CLI version ${result.version}, you can replace 'github/codeql-action/*@v${getActionVersion().split(".")[0]}' by 'github/codeql-action/*@v${getActionVersion()}' in your code scanning workflow to continue using this version of the CodeQL Action.` ); - core9.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); + core10.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); } return codeql; } @@ -132780,7 +132713,7 @@ var fs12 = __toESM(require("fs")); var path11 = __toESM(require("path")); var artifact = __toESM(require_artifact2()); var artifactLegacy = __toESM(require_artifact_client2()); -var core11 = __toESM(require_core()); +var core12 = __toESM(require_core()); var import_archiver = __toESM(require_archiver()); var import_del3 = __toESM(require_del()); @@ -132791,7 +132724,7 @@ var io5 = __toESM(require_io()); var import_del2 = __toESM(require_del()); // src/autobuild.ts -var core10 = __toESM(require_core()); +var core11 = __toESM(require_core()); // src/dependency-caching.ts var actionsCache3 = __toESM(require_cache3()); @@ -132938,7 +132871,7 @@ async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghV } const uploadSupported = isSafeArtifactUpload(codeQlVersion); if (!uploadSupported) { - core11.info( + core12.info( `Skipping debug artifact upload because the current CLI does not support safe upload. Please upgrade to CLI v${SafeArtifactUploadVersion} or later.` ); return "upload-not-supported"; @@ -132952,7 +132885,7 @@ async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghV ).sort()) suffix += `-${matrixVal}`; } catch { - core11.info( + core12.info( "Could not parse user-specified `matrix` input into JSON. The debug artifact will not be named with the user's `matrix` input." ); } @@ -132970,7 +132903,7 @@ async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghV ); return "upload-successful"; } catch (e) { - core11.warning(`Failed to upload debug artifacts: ${e}`); + core12.warning(`Failed to upload debug artifacts: ${e}`); return "upload-failed"; } } @@ -132993,7 +132926,7 @@ async function createPartialDatabaseBundle(config, language) { config.dbLocation, `${config.debugDatabaseName}-${language}-partial.zip` ); - core11.info( + core12.info( `${config.debugDatabaseName}-${language} is not finalized. Uploading partial database bundle at ${databaseBundlePath}...` ); if (fs12.existsSync(databaseBundlePath)) { @@ -133026,12 +132959,12 @@ async function createDatabaseBundleCli(codeql, config, language) { // src/init-action-post-helper.ts var fs16 = __toESM(require("fs")); -var core15 = __toESM(require_core()); +var core16 = __toESM(require_core()); var github2 = __toESM(require_github()); // src/status-report.ts var os2 = __toESM(require("os")); -var core12 = __toESM(require_core()); +var core13 = __toESM(require_core()); function isFirstPartyAnalysis(actionName) { if (actionName !== "upload-sarif" /* UploadSarif */) { return true; @@ -133068,12 +133001,12 @@ function getJobStatusDisplayName(status) { } function setJobStatusIfUnsuccessful(actionStatus) { if (actionStatus === "user-error") { - core12.exportVariable( + core13.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); } else if (actionStatus === "failure" || actionStatus === "aborted") { - core12.exportVariable( + core13.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_FAILURE" /* FailureStatus */ ); @@ -133092,14 +133025,14 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; if (workflowStartedAt === void 0) { workflowStartedAt = actionStartedAt.toISOString(); - core12.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + core13.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); } const runnerOs = getRequiredEnvParam("RUNNER_OS"); const codeQlCliVersion = getCachedCodeQlVersion(); const actionRef = process.env["GITHUB_ACTION_REF"] || ""; const testingEnvironment = getTestingEnvironment(); if (testingEnvironment) { - core12.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + core13.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); } const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; const statusReport = { @@ -133177,9 +133110,9 @@ var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scan async function sendStatusReport(statusReport) { setJobStatusIfUnsuccessful(statusReport.status); const statusReportJSON = JSON.stringify(statusReport); - core12.debug(`Sending status report: ${statusReportJSON}`); + core13.debug(`Sending status report: ${statusReportJSON}`); if (isInTestMode()) { - core12.debug("In test mode. Status reports are not uploaded."); + core13.debug("In test mode. Status reports are not uploaded."); return; } const nwo = getRepositoryNwo(); @@ -133198,26 +133131,26 @@ async function sendStatusReport(statusReport) { switch (e.status) { case 403: if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core12.warning( + core13.warning( `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading Code Scanning results requires write access. To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. 
See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` ); } else { - core12.warning(e.message); + core13.warning(e.message); } return; case 404: - core12.warning(e.message); + core13.warning(e.message); return; case 422: if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core12.debug(INCOMPATIBLE_MSG); + core13.debug(INCOMPATIBLE_MSG); } else { - core12.debug(OUT_OF_DATE_MSG); + core13.debug(OUT_OF_DATE_MSG); } return; } } - core12.warning( + core13.warning( `An unexpected error occurred when sending code scanning status report: ${getErrorMessage( e )}` @@ -133229,7 +133162,7 @@ async function sendStatusReport(statusReport) { var fs14 = __toESM(require("fs")); var path13 = __toESM(require("path")); var import_zlib = __toESM(require("zlib")); -var core13 = __toESM(require_core()); +var core14 = __toESM(require_core()); var import_file_url = __toESM(require_file_url()); var jsonschema = __toESM(require_lib5()); @@ -134486,7 +134419,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. ${deprecationMoreInformationMessage}` ); - core13.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core14.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -134536,7 +134469,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple CodeQL runs with the same category is deprecated ${deprecationWarningMessage} for CodeQL CLI 2.16.6 and earlier. Please update your CodeQL CLI version or update your workflow to set a distinct category for each CodeQL run. ${deprecationMoreInformationMessage}` ); - core13.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core14.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -134601,13 +134534,13 @@ async function uploadPayload(payload, repositoryNwo, logger, target) { if (isHTTPError(e)) { switch (e.status) { case 403: - core13.warning(e.message || GENERIC_403_MSG); + core14.warning(e.message || GENERIC_403_MSG); break; case 404: - core13.warning(e.message || GENERIC_404_MSG); + core14.warning(e.message || GENERIC_404_MSG); break; default: - core13.warning(e.message); + core14.warning(e.message); break; } } @@ -134953,7 +134886,7 @@ function validateUniqueCategory(sarif, sentinelPrefix = CodeScanningTarget.senti `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? 
tool : "none"})` ); } - core13.exportVariable(sentinelEnvVar, sentinelEnvVar); + core14.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { @@ -134998,7 +134931,7 @@ function filterAlertsByDiffRange(logger, sarif) { var fs15 = __toESM(require("fs")); var path14 = __toESM(require("path")); var import_zlib2 = __toESM(require("zlib")); -var core14 = __toESM(require_core()); +var core15 = __toESM(require_core()); function toCodedErrors(errors) { return Object.entries(errors).reduce( (acc, [code, message]) => { @@ -135175,7 +135108,7 @@ async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) { } async function tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger) { if (process.env["CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */] !== "true") { - core15.exportVariable( + core16.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); @@ -135193,7 +135126,7 @@ async function tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger return createFailedUploadFailedSarifResult(e); } } else { - core15.exportVariable( + core16.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_SUCCESS" /* SuccessStatus */ ); @@ -135367,7 +135300,7 @@ async function runWrapper() { } } catch (unwrappedError) { const error2 = wrapError(unwrappedError); - core16.setFailed(error2.message); + core17.setFailed(error2.message); const statusReportBase2 = await createStatusReportBase( "init-post" /* InitPost */, getActionsStatus(error2), diff --git a/lib/init-action.js b/lib/init-action.js index 051cbe02d..b0e4427f4 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -10754,7 +10754,7 @@ var require_mock_interceptor = __commonJS({ var require_mock_client = __commonJS({ "node_modules/undici/lib/mock/mock-client.js"(exports2, module2) { "use strict"; - var { promisify: promisify3 } = require("util"); + var { promisify: promisify2 } = require("util"); var Client = require_client(); var { buildMockDispatch } = require_mock_utils(); var { @@ -10794,7 +10794,7 @@ var require_mock_client = __commonJS({ return new MockInterceptor(opts, this[kDispatches]); } async [kClose]() { - await promisify3(this[kOriginalClose])(); + await promisify2(this[kOriginalClose])(); this[kConnected] = 0; this[kMockAgent][Symbols.kClients].delete(this[kOrigin]); } @@ -10807,7 +10807,7 @@ var require_mock_client = __commonJS({ var require_mock_pool = __commonJS({ "node_modules/undici/lib/mock/mock-pool.js"(exports2, module2) { "use strict"; - var { promisify: promisify3 } = require("util"); + var { promisify: promisify2 } = require("util"); var Pool = require_pool(); var { buildMockDispatch } = require_mock_utils(); var { @@ -10847,7 +10847,7 @@ var require_mock_pool = __commonJS({ return new MockInterceptor(opts, this[kDispatches]); } async [kClose]() { - await promisify3(this[kOriginalClose])(); + await promisify2(this[kOriginalClose])(); this[kConnected] = 0; this[kMockAgent][Symbols.kClients].delete(this[kOrigin]); } @@ -31839,14 +31839,14 @@ var require_out4 = __commonJS({ var require_path_type = __commonJS({ "node_modules/path-type/index.js"(exports2) { "use strict"; - var { promisify: promisify3 } = require("util"); + var { promisify: promisify2 } = require("util"); var fs15 = require("fs"); async function isType(fsStatType, 
statsMethodName, filePath) { if (typeof filePath !== "string") { throw new TypeError(`Expected a string, got ${typeof filePath}`); } try { - const stats = await promisify3(fs15[fsStatType])(filePath); + const stats = await promisify2(fs15[fsStatType])(filePath); return stats[statsMethodName](); } catch (error2) { if (error2.code === "ENOENT") { @@ -32339,7 +32339,7 @@ var require_slash = __commonJS({ var require_gitignore = __commonJS({ "node_modules/globby/gitignore.js"(exports2, module2) { "use strict"; - var { promisify: promisify3 } = require("util"); + var { promisify: promisify2 } = require("util"); var fs15 = require("fs"); var path15 = require("path"); var fastGlob = require_out4(); @@ -32351,7 +32351,7 @@ var require_gitignore = __commonJS({ "**/coverage/**", "**/.git" ]; - var readFileP = promisify3(fs15.readFile); + var readFileP = promisify2(fs15.readFile); var mapGitIgnorePatternTo = (base) => (ignore) => { if (ignore.startsWith("!")) { return "!" + path15.posix.join(base, ignore.slice(1)); @@ -36135,7 +36135,7 @@ var require_p_map = __commonJS({ var require_del = __commonJS({ "node_modules/del/index.js"(exports2, module2) { "use strict"; - var { promisify: promisify3 } = require("util"); + var { promisify: promisify2 } = require("util"); var path15 = require("path"); var globby = require_globby(); var isGlob = require_is_glob(); @@ -36145,7 +36145,7 @@ var require_del = __commonJS({ var isPathInside = require_is_path_inside(); var rimraf = require_rimraf(); var pMap = require_p_map(); - var rimrafP = promisify3(rimraf); + var rimrafP = promisify2(rimraf); var rimrafOptions = { glob: false, unlink: gracefulFs.unlink, @@ -36244,127 +36244,6 @@ var require_del = __commonJS({ } }); -// node_modules/tiny-each-async/index.js -var require_tiny_each_async = __commonJS({ - "node_modules/tiny-each-async/index.js"(exports2, module2) { - "use strict"; - module2.exports = function eachAsync(arr, parallelLimit, iteratorFn, cb) { - var pending = 0; - var index = 0; - var lastIndex = arr.length - 1; - var called = false; - var limit; - var callback; - var iterate; - if (typeof parallelLimit === "number") { - limit = parallelLimit; - iterate = iteratorFn; - callback = cb || function noop() { - }; - } else { - iterate = parallelLimit; - callback = iteratorFn || function noop() { - }; - limit = arr.length; - } - if (!arr.length) { - return callback(); - } - var iteratorLength = iterate.length; - var shouldCallNextIterator = function shouldCallNextIterator2() { - return !called && pending < limit && index < lastIndex; - }; - var iteratorCallback = function iteratorCallback2(err) { - if (called) { - return; - } - pending--; - if (err || index === lastIndex && !pending) { - called = true; - callback(err); - } else if (shouldCallNextIterator()) { - processIterator(++index); - } - }; - var processIterator = function processIterator2() { - pending++; - var args = iteratorLength === 2 ? 
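/*
 * Note on the removal above: tiny-each-async was only in the bundle as a transitive
 * dependency of get-folder-size v2, which used it to walk directories with at most
 * 5000 callbacks in flight (the 5e3 argument below). get-folder-size v5, inlined
 * later in this diff, is promise-based and fans out with Promise.all instead, so the
 * helper goes away. A minimal sketch of the bounded-concurrency pattern the old
 * helper implemented, using only built-ins (illustrative; not part of the bundle):
 *
 *   async function eachAsyncLimit(items, limit, iterate) {
 *     const inFlight = new Set();
 *     for (const item of items) {
 *       const p = iterate(item).finally(() => inFlight.delete(p));
 *       inFlight.add(p);
 *       if (inFlight.size >= limit) await Promise.race(inFlight); // wait for a slot
 *     }
 *     await Promise.all(inFlight); // propagate any remaining rejection
 *   }
 */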
[arr[index], iteratorCallback] : [arr[index], index, iteratorCallback]; - iterate.apply(null, args); - if (shouldCallNextIterator()) { - processIterator2(++index); - } - }; - processIterator(); - }; - } -}); - -// node_modules/get-folder-size/index.js -var require_get_folder_size = __commonJS({ - "node_modules/get-folder-size/index.js"(exports2, module2) { - "use strict"; - var fs15 = require("fs"); - var path15 = require("path"); - var eachAsync = require_tiny_each_async(); - function readSizeRecursive(seen, item, ignoreRegEx, callback) { - let cb; - let ignoreRegExp; - if (!callback) { - cb = ignoreRegEx; - ignoreRegExp = null; - } else { - cb = callback; - ignoreRegExp = ignoreRegEx; - } - fs15.lstat(item, function lstat(e, stats) { - let total = !e ? stats.size || 0 : 0; - if (stats) { - if (seen.has(stats.ino)) { - return cb(null, 0); - } - seen.add(stats.ino); - } - if (!e && stats.isDirectory()) { - fs15.readdir(item, (err, list) => { - if (err) { - return cb(err); - } - eachAsync( - list, - 5e3, - (dirItem, next) => { - readSizeRecursive( - seen, - path15.join(item, dirItem), - ignoreRegExp, - (error2, size) => { - if (!error2) { - total += size; - } - next(error2); - } - ); - }, - (finalErr) => { - cb(finalErr, total); - } - ); - }); - } else { - if (ignoreRegExp && ignoreRegExp.test(item)) { - total = 0; - } - cb(e, total); - } - }); - } - module2.exports = (...args) => { - args.unshift(/* @__PURE__ */ new Set()); - return readSizeRecursive(...args); - }; - } -}); - // package.json var require_package = __commonJS({ "package.json"(exports2, module2) { @@ -36412,7 +36291,7 @@ var require_package = __commonJS({ "fast-deep-equal": "^3.1.3", "file-url": "^3.0.0", "follow-redirects": "^1.15.11", - "get-folder-size": "^2.0.1", + "get-folder-size": "^5.0.0", "js-yaml": "^4.1.0", jsonschema: "1.4.1", long: "^5.3.2", @@ -38076,7 +37955,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -38086,15 +37965,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core13.debug(`implicitDescendants '${result.implicitDescendants}'`); + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -38758,7 +38637,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs15 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path15 = __importStar4(require("path")); @@ -38809,7 +38688,7 @@ var 
require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core13.debug(`Search path '${searchPath}'`); + core14.debug(`Search path '${searchPath}'`); try { yield __await4(fs15.promises.lstat(searchPath)); } catch (err) { @@ -38881,7 +38760,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core13.debug(`Broken symlink '${item.path}'`); + core14.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); @@ -38897,7 +38776,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -40221,7 +40100,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob2 = __importStar4(require_glob2()); var io7 = __importStar4(require_io()); @@ -40274,7 +40153,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path15.relative(workspace, file).replace(new RegExp(`\\${path15.sep}`, "g"), "/"); - core13.debug(`Matched: ${relativeFile}`); + core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -40304,7 +40183,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core13.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -40315,10 +40194,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core13.debug(err.message); + core14.debug(err.message); } versionOutput = versionOutput.trim(); - core13.debug(versionOutput); + core14.debug(versionOutput); return versionOutput; }); } @@ -40326,7 +40205,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver9.clean(versionOutput); - core13.debug(`zstd version: ${version}`); + core14.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -75638,7 +75517,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -75680,7 +75559,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * 
(transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -75735,14 +75614,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core13.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core13.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -75813,7 +75692,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -75874,9 +75753,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core13.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core13.debug(`${name} - Error is not retryable`); + core14.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -75981,7 +75860,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -76019,7 +75898,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core13.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
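/*
 * The UploadProgress and DownloadProgress hunks around this point differ only in
 * direction; both derive their status line with the same arithmetic. Condensed
 * sketch of that computation (an illustrative restatement of the code above):
 *
 *   function progressLine(transferredBytes, contentLength, startTime) {
 *     const percentage = (100 * (transferredBytes / contentLength)).toFixed(1);
 *     const elapsedSec = (Date.now() - startTime) / 1000;
 *     const mbPerSec = (transferredBytes / (1024 * 1024) / elapsedSec).toFixed(1);
 *     return `${transferredBytes} of ${contentLength} (${percentage}%), ${mbPerSec} MBs/sec`;
 *   }
 */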
@@ -76053,7 +75932,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -76103,7 +75982,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core13.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -76114,7 +75993,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core13.debug("Unable to validate download, no Content-Length header"); + core14.debug("Unable to validate download, no Content-Length header"); } }); } @@ -76234,7 +76113,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core13.debug("Unable to determine content length, downloading file with http-client..."); + core14.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -76314,7 +76193,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -76334,9 +76213,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core13.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -76373,12 +76252,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Download concurrency: ${result.downloadConcurrency}`); - core13.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core13.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core13.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core13.debug(`Lookup only: ${result.lookupOnly}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Download concurrency: ${result.downloadConcurrency}`); + core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core14.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -76558,7 +76437,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs15 = __importStar4(require("fs")); @@ -76576,7 +76455,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core13.debug(`Resource Url: ${url}`); + core14.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -76604,7 +76483,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core13.isDebug()) { + if (core14.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -76617,9 +76496,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core13.setSecret(cacheDownloadUrl); - core13.debug(`Cache Result:`); - core13.debug(JSON.stringify(cacheResult)); + core14.setSecret(cacheDownloadUrl); + core14.debug(`Cache Result:`); + core14.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -76634,10 +76513,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
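/*
 * The getUploadOptions/getDownloadOptions hunks above also document the environment
 * knobs this cache client reads at runtime: CACHE_UPLOAD_CONCURRENCY (capped at 32),
 * CACHE_UPLOAD_CHUNK_SIZE (given in MiB, capped at 128 MiB before conversion to
 * bytes), and SEGMENT_DOWNLOAD_TIMEOUT_MINS (converted to milliseconds). A
 * hypothetical tuning, assuming these are set in the job environment before the
 * client reads them:
 *
 *   process.env.CACHE_UPLOAD_CONCURRENCY = "8";       // <= 32
 *   process.env.CACHE_UPLOAD_CHUNK_SIZE = "64";       // MiB, <= 128
 *   process.env.SEGMENT_DOWNLOAD_TIMEOUT_MINS = "10"; // minutes -> ms internally
 */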
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core13.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core13.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -76682,7 +76561,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core13.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -76704,7 +76583,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core13.debug("Awaiting all uploads"); + core14.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -76747,16 +76626,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core13.debug("Upload cache"); + core14.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core13.debug("Commiting cache"); + core14.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
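/*
 * The v1 save path shown above is two-phase: uploadFile pushes the archive in
 * parallel chunks, each with its own Content-Range header, and commitCache then
 * records the final size so the service can validate the upload. Roughly (an
 * await-style sketch of the yield-based code above):
 *
 *   await uploadFile(httpClient, cacheId, archivePath, options); // chunked upload
 *   const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
 *   await commitCache(httpClient, cacheId, cacheSize);           // finalize
 */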
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core13.info("Cache saved successfully"); + core14.info("Cache saved successfully"); } }); } @@ -82182,7 +82061,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var path15 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -82235,7 +82114,7 @@ var require_cache3 = __commonJS({ function restoreCache4(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -82251,8 +82130,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -82270,19 +82149,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path15.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -82290,16 +82169,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error2.message}`); + core14.error(`Failed to restore: ${error2.message}`); } else { - core13.warning(`Failed to restore: ${error2.message}`); + core14.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -82310,8 +82189,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -82329,30 +82208,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core13.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core14.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core13.info(`Cache hit for restore-key: ${response.matchedKey}`); + core14.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core13.info(`Cache hit for: ${response.matchedKey}`); + core14.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
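/*
 * Restore flow against the v2 cache service, per the hunk above: resolve
 * [primaryKey, ...restoreKeys] to a signed download URL via the Twirp client,
 * distinguish an exact hit (key === matchedKey) from a restore-key hit, and
 * short-circuit before downloading when lookupOnly is set. Sketch (core here
 * stands for @actions/core):
 *
 *   const response = await twirpClient.GetCacheEntryDownloadURL(request);
 *   if (!response.ok) return undefined;                   // miss
 *   if (request.key !== response.matchedKey) {
 *     core.info(`Cache hit for restore-key: ${response.matchedKey}`);
 *   }
 *   if (options?.lookupOnly) return response.matchedKey;  // skip download
 */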
void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path15.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive path: ${archivePath}`); - core13.debug(`Starting download of archive to: ${archivePath}`); + core14.debug(`Archive path: ${archivePath}`); + core14.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core13.isDebug()) { + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -82360,9 +82239,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error2.message}`); + core14.error(`Failed to restore: ${error2.message}`); } else { - core13.warning(`Failed to restore: ${error2.message}`); + core14.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -82371,7 +82250,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -82380,7 +82259,7 @@ var require_cache3 = __commonJS({ function saveCache4(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -82399,26 +82278,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path15.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -82431,26 +82310,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core13.debug(`Saving Cache (ID: ${cacheId})`); + core14.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core13.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -82463,26 +82342,26 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path15.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } options.archiveSizeBytes = archiveFileSize; - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -82496,10 +82375,10 @@ var require_cache3 = __commonJS({ } signedUploadUrl = response.signedUploadUrl; } catch (error2) { - core13.debug(`Failed to reserve 
cache: ${error2}`); + core14.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core13.debug(`Attempting to upload cache located at: ${archivePath}`); + core14.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -82507,7 +82386,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core13.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); } @@ -82517,19 +82396,19 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core13.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -82571,7 +82450,7 @@ var require_internal_glob_options_helper2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -82583,23 +82462,23 @@ var require_internal_glob_options_helper2 = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core13.debug(`implicitDescendants '${result.implicitDescendants}'`); + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = copy.matchDirectories; - core13.debug(`matchDirectories '${result.matchDirectories}'`); + core14.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core13.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core14.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -83283,7 +83162,7 @@ var 
require_internal_globber2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs15 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper2()); var path15 = __importStar4(require("path")); @@ -83336,7 +83215,7 @@ var require_internal_globber2 = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core13.debug(`Search path '${searchPath}'`); + core14.debug(`Search path '${searchPath}'`); try { yield __await4(fs15.promises.lstat(searchPath)); } catch (err) { @@ -83411,7 +83290,7 @@ var require_internal_globber2 = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core13.debug(`Broken symlink '${item.path}'`); + core14.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); @@ -83427,7 +83306,7 @@ var require_internal_globber2 = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -83520,7 +83399,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = void 0; var crypto = __importStar4(require("crypto")); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs15 = __importStar4(require("fs")); var stream2 = __importStar4(require("stream")); var util = __importStar4(require("util")); @@ -83529,7 +83408,7 @@ var require_internal_hash_files = __commonJS({ var _a, e_1, _b, _c; var _d; return __awaiter4(this, void 0, void 0, function* () { - const writeDelegate = verbose ? core13.info : core13.debug; + const writeDelegate = verbose ? core14.info : core14.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); const result = crypto.createHash("sha256"); @@ -83834,7 +83713,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -83857,10 +83736,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core13.info(err.message); + core14.info(err.message); } const seconds = this.getSleepAmount(); - core13.info(`Waiting ${seconds} seconds before trying again`); + core14.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -83940,7 +83819,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var io7 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); var fs15 = __importStar4(require("fs")); @@ -83969,8 +83848,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || path15.join(_getTempDirectory(), crypto.randomUUID()); yield io7.mkdirP(path15.dirname(dest)); - core13.debug(`Downloading ${url}`); - core13.debug(`Destination ${dest}`); + core14.debug(`Downloading ${url}`); + core14.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -83997,7 +83876,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core13.debug("set auth"); + core14.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -84006,7 +83885,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError(response.message.statusCode); - core13.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core14.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -84015,16 +83894,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs15.createWriteStream(dest)); - core13.debug("download complete"); + core14.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core13.debug("download failed"); + core14.debug("download failed"); try { yield io7.rmRF(dest); } catch (err) { - core13.debug(`Failed to delete '${dest}'. ${err.message}`); + core14.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -84039,7 +83918,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core13.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core14.isDebug() ? 
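/*
 * downloadTool above wraps each fetch attempt in the RetryHelper from the previous
 * hunk: 3 attempts with a 10-20 second pause between them (both bounds overridable
 * in tests via TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS / _MAX_SECONDS). Roughly, with
 * downloadToolAttempt standing in for the per-attempt body and the retryable
 * predicate simplified to an assumption:
 *
 *   const retryHelper = new RetryHelper(3, minSeconds, maxSeconds);
 *   return await retryHelper.execute(
 *     () => downloadToolAttempt(url, dest, auth, headers),
 *     (err) => !(err instanceof HTTPError) // assumed: retry everything but HTTP errors
 *   );
 */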
"-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -84089,7 +83968,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core13.debug("Checking tar --version"); + core14.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -84099,7 +83978,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core13.debug(versionOutput.trim()); + core14.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -84107,7 +83986,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core13.isDebug() && !flags.includes("v")) { + if (core14.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -84139,7 +84018,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core13.isDebug()) { + if (core14.isDebug()) { args.push("-v"); } const xarPath = yield io7.which("xar", true); @@ -84184,7 +84063,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core13.debug(`Using pwsh at path: ${pwshPath}`); + core14.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -84204,7 +84083,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io7.which("powershell", true); - core13.debug(`Using powershell at path: ${powershellPath}`); + core14.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -84213,7 +84092,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const unzipPath = yield io7.which("unzip", true); const args = [file]; - if (!core13.isDebug()) { + if (!core14.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -84224,8 +84103,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os5.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch2}`); - core13.debug(`source dir: ${sourceDir}`); + core14.debug(`Caching tool ${tool} ${version} ${arch2}`); + core14.debug(`source dir: ${sourceDir}`); if (!fs15.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -84243,14 +84122,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os5.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch2}`); - core13.debug(`source file: ${sourceFile}`); + core14.debug(`Caching tool ${tool} ${version} ${arch2}`); + core14.debug(`source file: ${sourceFile}`); if (!fs15.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path15.join(destFolder, targetFile); - core13.debug(`destination file ${destPath}`); + core14.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -84274,12 +84153,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; const cachePath = path15.join(_getCacheDirectory(), toolName, versionSpec, arch2); - 
core13.debug(`checking cache: ${cachePath}`); + core14.debug(`checking cache: ${cachePath}`); if (fs15.existsSync(cachePath) && fs15.existsSync(`${cachePath}.complete`)) { - core13.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core13.debug("not found"); + core14.debug("not found"); } } return toolPath; @@ -84310,7 +84189,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core13.debug("set auth"); + core14.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -84331,7 +84210,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core13.debug("Invalid json"); + core14.debug("Invalid json"); } } return releases; @@ -84357,7 +84236,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path15.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); - core13.debug(`destination ${folderPath}`); + core14.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); yield io7.rmRF(markerPath); @@ -84369,19 +84248,19 @@ var require_tool_cache = __commonJS({ const folderPath = path15.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs15.writeFileSync(markerPath, ""); - core13.debug("finished caching tool"); + core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver9.clean(versionSpec) || ""; - core13.debug(`isExplicit: ${c}`); + core14.debug(`isExplicit: ${c}`); const valid3 = semver9.valid(c) != null; - core13.debug(`explicit? ${valid3}`); + core14.debug(`explicit? 
${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core13.debug(`evaluating ${versions.length} versions`); + core14.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver9.gt(a, b)) { return 1; @@ -84397,9 +84276,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core13.debug(`matched: ${version}`); + core14.debug(`matched: ${version}`); } else { - core13.debug("match not found"); + core14.debug("match not found"); } return version; } @@ -84978,7 +84857,7 @@ var require_follow_redirects = __commonJS({ // src/init-action.ts var fs14 = __toESM(require("fs")); var path14 = __toESM(require("path")); -var core12 = __toESM(require_core()); +var core13 = __toESM(require_core()); var io6 = __toESM(require_io()); var semver8 = __toESM(require_semver2()); @@ -85036,7 +84915,7 @@ var v4_default = v4; // src/actions-util.ts var fs2 = __toESM(require("fs")); var path2 = __toESM(require("path")); -var core3 = __toESM(require_core()); +var core4 = __toESM(require_core()); var toolrunner = __toESM(require_toolrunner()); var github = __toESM(require_github()); var io2 = __toESM(require_io()); @@ -85045,8 +84924,7 @@ var io2 = __toESM(require_io()); var fs = __toESM(require("fs")); var os = __toESM(require("os")); var path = __toESM(require("path")); -var import_util = require("util"); -var core2 = __toESM(require_core()); +var core3 = __toESM(require_core()); var exec = __toESM(require_exec()); var io = __toESM(require_io()); @@ -85187,7 +85065,61 @@ function checkDiskSpace(directoryPath, dependencies = { // src/util.ts var import_del = __toESM(require_del()); -var import_get_folder_size = __toESM(require_get_folder_size()); + +// node_modules/get-folder-size/index.js +var import_node_path2 = require("node:path"); +async function getFolderSize(itemPath, options) { + return await core(itemPath, options, { errors: true }); +} +getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); +getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); +async function core(rootItemPath, options = {}, returnType = {}) { + const fs15 = options.fs || await import("node:fs/promises"); + let folderSize = 0n; + const foundInos = /* @__PURE__ */ new Set(); + const errors = []; + await processItem(rootItemPath); + async function processItem(itemPath) { + if (options.ignore?.test(itemPath)) return; + const stats = returnType.strict ? await fs15.lstat(itemPath, { bigint: true }) : await fs15.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); + if (typeof stats !== "object") return; + if (!foundInos.has(stats.ino)) { + foundInos.add(stats.ino); + folderSize += stats.size; + } + if (stats.isDirectory()) { + const directoryItems = returnType.strict ? await fs15.readdir(itemPath) : await fs15.readdir(itemPath).catch((error2) => errors.push(error2)); + if (typeof directoryItems !== "object") return; + await Promise.all( + directoryItems.map( + (directoryItem) => processItem((0, import_node_path2.join)(itemPath, directoryItem)) + ) + ); + } + } + if (!options.bigint) { + if (folderSize > BigInt(Number.MAX_SAFE_INTEGER)) { + const error2 = new RangeError( + "The folder size is too large to return as a Number. You can instruct this package to return a BigInt instead." 
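/*
 * This inlined block is the heart of the diff: get-folder-size v5, bundled in place
 * of the old v2 module deleted earlier. It stats entries with lstat({ bigint: true }),
 * accumulates the total as a BigInt, dedupes hard links through their inode numbers,
 * and recurses with Promise.all; the RangeError being constructed here is thrown (or
 * recorded, outside strict mode) when a non-bigint caller asks for a total above
 * Number.MAX_SAFE_INTEGER. The three entry points, per the code above:
 *
 *   const { size, errors } = await getFolderSize("/some/dir"); // size + collected errors
 *   const size2 = await getFolderSize.loose("/some/dir");      // size only, errors ignored
 *   const size3 = await getFolderSize.strict("/some/dir");     // throws on first error
 */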
+ ); + if (returnType.strict) { + throw error2; + } + errors.push(error2); + folderSize = Number.MAX_SAFE_INTEGER; + } else { + folderSize = Number(folderSize); + } + } + if (returnType.errors) { + return { + size: folderSize, + errors: errors.length > 0 ? errors : null + }; + } else { + return folderSize; + } +} // node_modules/js-yaml/dist/js-yaml.mjs function isNothing(subject) { @@ -85758,7 +85690,7 @@ var json = failsafe.extend({ float ] }); -var core = json; +var core2 = json; var YAML_DATE_REGEXP = new RegExp( "^([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])$" ); @@ -85972,7 +85904,7 @@ var set = new type("tag:yaml.org,2002:set", { resolve: resolveYamlSet, construct: constructYamlSet }); -var _default = core.extend({ +var _default = core2.extend({ implicit: [ timestamp, merge @@ -88040,7 +87972,7 @@ function checkGitHubVersionInRange(version, logger) { ); } hasBeenWarnedAboutVersion = true; - core2.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true); + core3.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true); } function apiVersionInRange(version, minimumVersion2, maximumVersion2) { if (!semver.satisfies(version, `>=${minimumVersion2}`)) { @@ -88061,11 +87993,11 @@ function assertNever(value) { throw new ExhaustivityCheckingError(value); } function initializeEnvironment(version) { - core2.exportVariable("CODEQL_ACTION_FEATURE_MULTI_LANGUAGE" /* FEATURE_MULTI_LANGUAGE */, "false"); - core2.exportVariable("CODEQL_ACTION_FEATURE_SANDWICH" /* FEATURE_SANDWICH */, "false"); - core2.exportVariable("CODEQL_ACTION_FEATURE_SARIF_COMBINE" /* FEATURE_SARIF_COMBINE */, "true"); - core2.exportVariable("CODEQL_ACTION_FEATURE_WILL_UPLOAD" /* FEATURE_WILL_UPLOAD */, "true"); - core2.exportVariable("CODEQL_ACTION_VERSION" /* VERSION */, version); + core3.exportVariable("CODEQL_ACTION_FEATURE_MULTI_LANGUAGE" /* FEATURE_MULTI_LANGUAGE */, "false"); + core3.exportVariable("CODEQL_ACTION_FEATURE_SANDWICH" /* FEATURE_SANDWICH */, "false"); + core3.exportVariable("CODEQL_ACTION_FEATURE_SARIF_COMBINE" /* FEATURE_SARIF_COMBINE */, "true"); + core3.exportVariable("CODEQL_ACTION_FEATURE_WILL_UPLOAD" /* FEATURE_WILL_UPLOAD */, "true"); + core3.exportVariable("CODEQL_ACTION_VERSION" /* VERSION */, version); } function getRequiredEnvParam(paramName) { const value = process.env[paramName]; @@ -88119,7 +88051,7 @@ function getTestingEnvironment() { } async function tryGetFolderBytes(cacheDir, logger, quiet = false) { try { - return await (0, import_util.promisify)(import_get_folder_size.default)(cacheDir); + return await getFolderSize.loose(cacheDir); } catch (e) { if (!quiet || logger.isDebug()) { logger.warning( @@ -88149,7 +88081,7 @@ async function withTimeout(timeoutMs, promise, onTimeout) { } async function checkForTimeout() { if (hadTimeout === true) { - core2.info( + core3.info( "A timeout occurred, force exiting the process after 30 seconds to prevent hanging." ); await delay(3e4, { allowProcessExit: true }); @@ -88190,7 +88122,7 @@ async function checkDiskUsage(logger) { } else { logger.debug(message); } - core2.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true"); + core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true"); } return { numAvailableBytes: diskUsage.free, @@ -88210,10 +88142,10 @@ function checkActionVersion(version, githubVersion) { semver.coerce(githubVersion.version) ?? 
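/*
 * Call-site effect of the upgrade, visible in tryGetFolderBytes above: the v2
 * callback API had to be wrapped with promisify, while v5 is awaited directly,
 * which is also why the now-unused `promisify` import was dropped from util.ts.
 *
 *   // before (v2, callback API):
 *   return await promisify(getFolderSize)(cacheDir);
 *   // after (v5, promise API; loose swallows per-entry errors):
 *   return await getFolderSize.loose(cacheDir);
 */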
"0.0.0", ">=3.11" )) { - core2.error( + core3.error( "CodeQL Action major versions v1 and v2 have been deprecated. Please update all occurrences of the CodeQL Action in your workflow files to v3. For more information, see https://github.blog/changelog/2025-01-10-code-scanning-codeql-action-v2-is-now-deprecated/" ); - core2.exportVariable("CODEQL_ACTION_DID_LOG_VERSION_DEPRECATION" /* LOG_VERSION_DEPRECATION */, "true"); + core3.exportVariable("CODEQL_ACTION_DID_LOG_VERSION_DEPRECATION" /* LOG_VERSION_DEPRECATION */, "true"); } } } @@ -88244,13 +88176,13 @@ async function checkSipEnablement(logger) { if (sipStatusOutput.stdout.includes( "System Integrity Protection status: enabled." )) { - core2.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true"); + core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true"); return true; } if (sipStatusOutput.stdout.includes( "System Integrity Protection status: disabled." )) { - core2.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false"); + core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false"); return false; } } @@ -88297,14 +88229,14 @@ async function asyncSome(array, predicate) { // src/actions-util.ts var pkg = require_package(); var getRequiredInput = function(name) { - const value = core3.getInput(name); + const value = core4.getInput(name); if (!value) { throw new ConfigurationError(`Input required and not supplied: ${name}`); } return value; }; var getOptionalInput = function(name) { - const value = core3.getInput(name); + const value = core4.getInput(name); return value.length > 0 ? value : void 0; }; function getTemporaryDirectory() { @@ -88397,7 +88329,7 @@ var getFileType = async (filePath) => { }).exec(); return stdout.trim(); } catch (e) { - core3.info( + core4.info( `Could not determine type of ${filePath} from ${stdout}. ${stderr}` ); throw e; @@ -88470,7 +88402,7 @@ var persistInputs = function() { const inputEnvironmentVariables = Object.entries(process.env).filter( ([name]) => name.startsWith("INPUT_") ); - core3.saveState(persistedInputsKey, JSON.stringify(inputEnvironmentVariables)); + core4.saveState(persistedInputsKey, JSON.stringify(inputEnvironmentVariables)); }; function getPullRequestBranches() { const pullRequest = github.context.payload.pull_request; @@ -88500,7 +88432,7 @@ function isAnalyzingPullRequest() { } // src/api-client.ts -var core4 = __toESM(require_core()); +var core5 = __toESM(require_core()); var githubUtils = __toESM(require_utils4()); var retry = __toESM(require_dist_node15()); var import_console_log_level = __toESM(require_console_log_level()); @@ -88608,12 +88540,12 @@ async function getAnalysisKey() { const workflowPath = await getWorkflowRelativePath(); const jobName = getRequiredEnvParam("GITHUB_JOB"); analysisKey = `${workflowPath}:${jobName}`; - core4.exportVariable(analysisKeyEnvVar, analysisKey); + core5.exportVariable(analysisKeyEnvVar, analysisKey); return analysisKey; } // src/caching-utils.ts -var core5 = __toESM(require_core()); +var core6 = __toESM(require_core()); async function getTotalCacheSize(paths, logger, quiet = false) { const sizes = await Promise.all( paths.map((cacheDir) => tryGetFolderBytes(cacheDir, logger, quiet)) @@ -88639,7 +88571,7 @@ function getCachingKind(input) { case "restore": return "restore" /* Restore */; default: - core5.warning( + core6.warning( `Unrecognized 'dependency-caching' input: ${input}. 
Defaulting to 'none'.` ); return "none" /* None */; @@ -88698,13 +88630,13 @@ var path3 = __toESM(require("path")); var actionsCache = __toESM(require_cache3()); // src/git-utils.ts -var core6 = __toESM(require_core()); +var core7 = __toESM(require_core()); var toolrunner2 = __toESM(require_toolrunner()); var io3 = __toESM(require_io()); var runGitCommand = async function(workingDirectory, args, customErrorMessage) { let stdout = ""; let stderr = ""; - core6.debug(`Running git command: git ${args.join(" ")}`); + core7.debug(`Running git command: git ${args.join(" ")}`); try { await new toolrunner2.ToolRunner(await io3.which("git", true), args, { silent: true, @@ -88724,7 +88656,7 @@ var runGitCommand = async function(workingDirectory, args, customErrorMessage) { if (stderr.includes("not a git repository")) { reason = "The checkout path provided to the action does not appear to be a git repository."; } - core6.info(`git call failed. ${customErrorMessage} Error: ${reason}`); + core7.info(`git call failed. ${customErrorMessage} Error: ${reason}`); throw error2; } }; @@ -88847,7 +88779,7 @@ async function getRef() { ) !== head; if (hasChangedRef) { const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head"); - core6.debug( + core7.debug( `No longer on merge commit, rewriting ref from ${ref} to ${newRef}.` ); return newRef; @@ -88873,16 +88805,16 @@ async function isAnalyzingDefaultBranch() { } // src/logging.ts -var core7 = __toESM(require_core()); +var core8 = __toESM(require_core()); function getActionsLogger() { - return core7; + return core8; } async function withGroupAsync(groupName, f) { - core7.startGroup(groupName); + core8.startGroup(groupName); try { return await f(); } finally { - core7.endGroup(); + core8.endGroup(); } } function formatDuration(durationMs) { @@ -90538,7 +90470,7 @@ var io5 = __toESM(require_io()); // src/codeql.ts var fs11 = __toESM(require("fs")); var path11 = __toESM(require("path")); -var core9 = __toESM(require_core()); +var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); // src/cli-errors.ts @@ -90948,7 +90880,7 @@ var fs8 = __toESM(require("fs")); var os3 = __toESM(require("os")); var path8 = __toESM(require("path")); var import_perf_hooks2 = require("perf_hooks"); -var core8 = __toESM(require_core()); +var core9 = __toESM(require_core()); var import_http_client = __toESM(require_lib()); var toolcache2 = __toESM(require_tool_cache()); var import_follow_redirects = __toESM(require_follow_redirects()); @@ -91002,10 +90934,10 @@ async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorizat }; } } catch (e) { - core8.warning( + core9.warning( `Failed to download and extract CodeQL bundle using streaming with error: ${getErrorMessage(e)}` ); - core8.warning(`Falling back to downloading the bundle before extracting.`); + core9.warning(`Falling back to downloading the bundle before extracting.`); await cleanUpGlob(dest, "CodeQL bundle", logger); } const toolsDownloadStart = import_perf_hooks2.performance.now(); @@ -92069,12 +92001,12 @@ ${output}` ); } else if (checkVersion && process.env["CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */] !== "true" && !await codeQlVersionAtLeast(codeql, CODEQL_NEXT_MINIMUM_VERSION)) { const result = await codeql.getVersion(); - core9.warning( + core10.warning( `CodeQL CLI version ${result.version} was discontinued on ${GHES_MOST_RECENT_DEPRECATION_DATE} alongside GitHub Enterprise Server ${GHES_VERSION_MOST_RECENTLY_DEPRECATED} and will 
not be supported by the next minor release of the CodeQL Action. Please update to CodeQL CLI version ${CODEQL_NEXT_MINIMUM_VERSION} or later. For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version. Alternatively, if you want to continue using CodeQL CLI version ${result.version}, you can replace 'github/codeql-action/*@v${getActionVersion().split(".")[0]}' by 'github/codeql-action/*@v${getActionVersion()}' in your code scanning workflow to continue using this version of the CodeQL Action.` ); - core9.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); + core10.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); } return codeql; } @@ -92336,7 +92268,7 @@ function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = // src/status-report.ts var os4 = __toESM(require("os")); -var core10 = __toESM(require_core()); +var core11 = __toESM(require_core()); function isFirstPartyAnalysis(actionName) { if (actionName !== "upload-sarif" /* UploadSarif */) { return true; @@ -92352,12 +92284,12 @@ function getActionsStatus(error2, otherFailureCause) { } function setJobStatusIfUnsuccessful(actionStatus) { if (actionStatus === "user-error") { - core10.exportVariable( + core11.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); } else if (actionStatus === "failure" || actionStatus === "aborted") { - core10.exportVariable( + core11.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_FAILURE" /* FailureStatus */ ); @@ -92376,14 +92308,14 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; if (workflowStartedAt === void 0) { workflowStartedAt = actionStartedAt.toISOString(); - core10.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + core11.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); } const runnerOs = getRequiredEnvParam("RUNNER_OS"); const codeQlCliVersion = getCachedCodeQlVersion(); const actionRef = process.env["GITHUB_ACTION_REF"] || ""; const testingEnvironment = getTestingEnvironment(); if (testingEnvironment) { - core10.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + core11.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); } const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; const statusReport = { @@ -92461,9 +92393,9 @@ var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scan async function sendStatusReport(statusReport) { setJobStatusIfUnsuccessful(statusReport.status); const statusReportJSON = JSON.stringify(statusReport); - core10.debug(`Sending status report: ${statusReportJSON}`); + core11.debug(`Sending status report: ${statusReportJSON}`); if (isInTestMode()) { - core10.debug("In test mode. Status reports are not uploaded."); + core11.debug("In test mode. 
Status reports are not uploaded."); return; } const nwo = getRepositoryNwo(); @@ -92482,26 +92414,26 @@ async function sendStatusReport(statusReport) { switch (e.status) { case 403: if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core10.warning( + core11.warning( `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading Code Scanning results requires write access. To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` ); } else { - core10.warning(e.message); + core11.warning(e.message); } return; case 404: - core10.warning(e.message); + core11.warning(e.message); return; case 422: if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core10.debug(INCOMPATIBLE_MSG); + core11.debug(INCOMPATIBLE_MSG); } else { - core10.debug(OUT_OF_DATE_MSG); + core11.debug(OUT_OF_DATE_MSG); } return; } } - core10.warning( + core11.warning( `An unexpected error occurred when sending code scanning status report: ${getErrorMessage( e )}` @@ -92513,7 +92445,7 @@ async function sendStatusReport(statusReport) { var fs13 = __toESM(require("fs")); var path13 = __toESM(require("path")); var import_zlib = __toESM(require("zlib")); -var core11 = __toESM(require_core()); +var core12 = __toESM(require_core()); function toCodedErrors(errors) { return Object.entries(errors).reduce( (acc, [code, message]) => { @@ -92619,7 +92551,7 @@ async function validateWorkflow(codeql, logger) { } catch (e) { return `error: formatWorkflowErrors() failed: ${String(e)}`; } - core11.warning(message); + core12.warning(message); } return formatWorkflowCause(workflowErrors); } @@ -92791,8 +92723,8 @@ async function run() { ); const jobRunUuid = v4_default(); logger.info(`Job run UUID is ${jobRunUuid}.`); - core12.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); - core12.exportVariable("CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */, "true"); + core13.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); + core13.exportVariable("CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */, "true"); const configFile = getOptionalInput("config-file"); const sourceRoot = path14.resolve( getRequiredEnvParam("GITHUB_WORKSPACE"), @@ -92827,7 +92759,7 @@ async function run() { toolsVersion = initCodeQLResult.toolsVersion; toolsSource = initCodeQLResult.toolsSource; zstdAvailability = initCodeQLResult.zstdAvailability; - core12.startGroup("Validating workflow"); + core13.startGroup("Validating workflow"); const validateWorkflowResult = await validateWorkflow(codeql, logger); if (validateWorkflowResult === void 0) { logger.info("Detected no issues with the code scanning workflow."); @@ -92836,7 +92768,7 @@ async function run() { `Unable to validate code scanning workflow: ${validateWorkflowResult}` ); } - core12.endGroup(); + core13.endGroup(); if ( // Only enable the experimental features env variable for Rust analysis if the user has explicitly // requested rust - don't enable it via language autodetection. 
@@ -92851,7 +92783,7 @@ async function run() { ); } if (semver8.lt(actualVer, publicPreview)) { - core12.exportVariable("CODEQL_ENABLE_EXPERIMENTAL_FEATURES" /* EXPERIMENTAL_FEATURES */, "true"); + core13.exportVariable("CODEQL_ENABLE_EXPERIMENTAL_FEATURES" /* EXPERIMENTAL_FEATURES */, "true"); logger.info("Experimental Rust analysis enabled"); } } @@ -92871,7 +92803,7 @@ async function run() { // - The `init` Action is passed `debug: true`. // - Actions step debugging is enabled (e.g. by [enabling debug logging for a rerun](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-all-the-jobs-in-a-workflow), // or by setting the `ACTIONS_STEP_DEBUG` secret to `true`). - debugMode: getOptionalInput("debug") === "true" || core12.isDebug(), + debugMode: getOptionalInput("debug") === "true" || core13.isDebug(), debugArtifactName: getOptionalInput("debug-artifact-name") || DEFAULT_DEBUG_ARTIFACT_NAME, debugDatabaseName: getOptionalInput("debug-database-name") || DEFAULT_DEBUG_DATABASE_NAME, repository: repositoryNwo, @@ -92887,7 +92819,7 @@ async function run() { await checkInstallPython311(config.languages, codeql); } catch (unwrappedError) { const error2 = wrapError(unwrappedError); - core12.setFailed(error2.message); + core13.setFailed(error2.message); const statusReportBase = await createStatusReportBase( "init" /* Init */, error2 instanceof ConfigurationError ? "user-error" : "aborted", @@ -92946,8 +92878,8 @@ async function run() { } const goFlags = process.env["GOFLAGS"]; if (goFlags) { - core12.exportVariable("GOFLAGS", goFlags); - core12.warning( + core13.exportVariable("GOFLAGS", goFlags); + core13.warning( "Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action." ); } @@ -92971,7 +92903,7 @@ async function run() { "bin" ); fs14.mkdirSync(tempBinPath, { recursive: true }); - core12.addPath(tempBinPath); + core13.addPath(tempBinPath); const goWrapperPath = path14.resolve(tempBinPath, "go"); fs14.writeFileSync( goWrapperPath, @@ -92980,14 +92912,14 @@ async function run() { exec ${goBinaryPath} "$@"` ); fs14.chmodSync(goWrapperPath, "755"); - core12.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goWrapperPath); + core13.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goWrapperPath); } catch (e) { logger.warning( `Analyzing Go on Linux, but failed to install wrapper script. 
Tracing custom builds may fail: ${e}` ); } } else { - core12.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goBinaryPath); + core13.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goBinaryPath); } } catch (e) { logger.warning( @@ -93014,20 +92946,20 @@ exec ${goBinaryPath} "$@"` } } } - core12.exportVariable( + core13.exportVariable( "CODEQL_RAM", process.env["CODEQL_RAM"] || getMemoryFlagValue(getOptionalInput("ram"), logger).toString() ); - core12.exportVariable( + core13.exportVariable( "CODEQL_THREADS", process.env["CODEQL_THREADS"] || getThreadsFlagValue(getOptionalInput("threads"), logger).toString() ); if (await features.getValue("disable_kotlin_analysis_enabled" /* DisableKotlinAnalysisEnabled */)) { - core12.exportVariable("CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN", "true"); + core13.exportVariable("CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN", "true"); } const kotlinLimitVar = "CODEQL_EXTRACTOR_KOTLIN_OVERRIDE_MAXIMUM_VERSION_LIMIT"; if (await codeQlVersionAtLeast(codeql, "2.20.3") && !await codeQlVersionAtLeast(codeql, "2.20.4")) { - core12.exportVariable(kotlinLimitVar, "2.1.20"); + core13.exportVariable(kotlinLimitVar, "2.1.20"); } if (config.languages.includes("cpp" /* cpp */)) { const envVar = "CODEQL_EXTRACTOR_CPP_TRAP_CACHING"; @@ -93037,10 +92969,10 @@ exec ${goBinaryPath} "$@"` ); } else if (getTrapCachingEnabled() && await codeQlVersionAtLeast(codeql, "2.17.5")) { logger.info("Enabling CodeQL C++ TRAP caching support"); - core12.exportVariable(envVar, "true"); + core13.exportVariable(envVar, "true"); } else { logger.info("Disabling CodeQL C++ TRAP caching support"); - core12.exportVariable(envVar, "false"); + core13.exportVariable(envVar, "false"); } } if (shouldRestoreCache(config.dependencyCachingEnabled)) { @@ -93048,7 +92980,7 @@ exec ${goBinaryPath} "$@"` } if (await codeQlVersionAtLeast(codeql, "2.17.1")) { } else { - core12.exportVariable( + core13.exportVariable( "CODEQL_EXTRACTOR_PYTHON_DISABLE_LIBRARY_EXTRACTION", "true" ); @@ -93074,7 +93006,7 @@ exec ${goBinaryPath} "$@"` "python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */, codeql )) { - core12.exportVariable("CODEQL_EXTRACTOR_PYTHON_EXTRACT_STDLIB", "true"); + core13.exportVariable("CODEQL_EXTRACTOR_PYTHON_EXTRACT_STDLIB", "true"); } } const { registriesAuthTokens, qlconfigFile } = await generateRegistries( @@ -93116,15 +93048,15 @@ exec ${goBinaryPath} "$@"` const tracerConfig = await getCombinedTracerConfig(codeql, config); if (tracerConfig !== void 0) { for (const [key, value] of Object.entries(tracerConfig.env)) { - core12.exportVariable(key, value); + core13.exportVariable(key, value); } } flushDiagnostics(config); - core12.setOutput("codeql-path", config.codeQLCmd); - core12.setOutput("codeql-version", (await codeql.getVersion()).version); + core13.setOutput("codeql-path", config.codeQLCmd); + core13.setOutput("codeql-version", (await codeql.getVersion()).version); } catch (unwrappedError) { const error2 = wrapError(unwrappedError); - core12.setFailed(error2.message); + core13.setFailed(error2.message); await sendCompletedStatusReport( startedAt, config, @@ -93184,7 +93116,7 @@ async function runWrapper() { try { await run(); } catch (error2) { - core12.setFailed(`init action failed: ${getErrorMessage(error2)}`); + core13.setFailed(`init action failed: ${getErrorMessage(error2)}`); } await checkForTimeout(); } diff --git a/lib/resolve-environment-action.js b/lib/resolve-environment-action.js index b790a67ce..f5d5d6ce8 100644 --- 
a/lib/resolve-environment-action.js +++ b/lib/resolve-environment-action.js @@ -34336,127 +34336,6 @@ var require_del = __commonJS({ } }); -// node_modules/tiny-each-async/index.js -var require_tiny_each_async = __commonJS({ - "node_modules/tiny-each-async/index.js"(exports2, module2) { - "use strict"; - module2.exports = function eachAsync(arr, parallelLimit, iteratorFn, cb) { - var pending = 0; - var index = 0; - var lastIndex = arr.length - 1; - var called = false; - var limit; - var callback; - var iterate; - if (typeof parallelLimit === "number") { - limit = parallelLimit; - iterate = iteratorFn; - callback = cb || function noop() { - }; - } else { - iterate = parallelLimit; - callback = iteratorFn || function noop() { - }; - limit = arr.length; - } - if (!arr.length) { - return callback(); - } - var iteratorLength = iterate.length; - var shouldCallNextIterator = function shouldCallNextIterator2() { - return !called && pending < limit && index < lastIndex; - }; - var iteratorCallback = function iteratorCallback2(err) { - if (called) { - return; - } - pending--; - if (err || index === lastIndex && !pending) { - called = true; - callback(err); - } else if (shouldCallNextIterator()) { - processIterator(++index); - } - }; - var processIterator = function processIterator2() { - pending++; - var args = iteratorLength === 2 ? [arr[index], iteratorCallback] : [arr[index], index, iteratorCallback]; - iterate.apply(null, args); - if (shouldCallNextIterator()) { - processIterator2(++index); - } - }; - processIterator(); - }; - } -}); - -// node_modules/get-folder-size/index.js -var require_get_folder_size = __commonJS({ - "node_modules/get-folder-size/index.js"(exports2, module2) { - "use strict"; - var fs5 = require("fs"); - var path5 = require("path"); - var eachAsync = require_tiny_each_async(); - function readSizeRecursive(seen, item, ignoreRegEx, callback) { - let cb; - let ignoreRegExp; - if (!callback) { - cb = ignoreRegEx; - ignoreRegExp = null; - } else { - cb = callback; - ignoreRegExp = ignoreRegEx; - } - fs5.lstat(item, function lstat(e, stats) { - let total = !e ? 
stats.size || 0 : 0; - if (stats) { - if (seen.has(stats.ino)) { - return cb(null, 0); - } - seen.add(stats.ino); - } - if (!e && stats.isDirectory()) { - fs5.readdir(item, (err, list) => { - if (err) { - return cb(err); - } - eachAsync( - list, - 5e3, - (dirItem, next) => { - readSizeRecursive( - seen, - path5.join(item, dirItem), - ignoreRegExp, - (error2, size) => { - if (!error2) { - total += size; - } - next(error2); - } - ); - }, - (finalErr) => { - cb(finalErr, total); - } - ); - }); - } else { - if (ignoreRegExp && ignoreRegExp.test(item)) { - total = 0; - } - cb(e, total); - } - }); - } - module2.exports = (...args) => { - args.unshift(/* @__PURE__ */ new Set()); - return readSizeRecursive(...args); - }; - } -}); - // node_modules/semver/internal/constants.js var require_constants9 = __commonJS({ "node_modules/semver/internal/constants.js"(exports2, module2) { @@ -36412,7 +36291,7 @@ var require_package = __commonJS({ "fast-deep-equal": "^3.1.3", "file-url": "^3.0.0", "follow-redirects": "^1.15.11", - "get-folder-size": "^2.0.1", + "get-folder-size": "^5.0.0", "js-yaml": "^4.1.0", jsonschema: "1.4.1", long: "^5.3.2", @@ -38076,7 +37955,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core12 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -38086,15 +37965,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core12.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core12.debug(`implicitDescendants '${result.implicitDescendants}'`); + core13.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core12.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -38758,7 +38637,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core12 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var fs5 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path5 = __importStar4(require("path")); @@ -38809,7 +38688,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core12.debug(`Search path '${searchPath}'`); + core13.debug(`Search path '${searchPath}'`); try { yield __await4(fs5.promises.lstat(searchPath)); } catch (err) { @@ -38881,7 +38760,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core12.debug(`Broken symlink '${item.path}'`); + core13.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -38897,7 +38776,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core12.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -40221,7 +40100,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core12 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob = __importStar4(require_glob2()); var io5 = __importStar4(require_io()); @@ -40274,7 +40153,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path5.relative(workspace, file).replace(new RegExp(`\\${path5.sep}`, "g"), "/"); - core12.debug(`Matched: ${relativeFile}`); + core13.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -40304,7 +40183,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core12.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core13.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -40315,10 +40194,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core12.debug(err.message); + core13.debug(err.message); } versionOutput = versionOutput.trim(); - core12.debug(versionOutput); + core13.debug(versionOutput); return versionOutput; }); } @@ -40326,7 +40205,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core12.debug(`zstd version: ${version}`); + core13.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -75638,7 +75517,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core12 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -75680,7 +75559,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core12.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core13.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -75735,14 +75614,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core12.debug(`BlobClient: 
${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core13.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core12.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core13.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -75813,7 +75692,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core12 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -75874,9 +75753,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core12.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core13.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core12.debug(`${name} - Error is not retryable`); + core13.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -75981,7 +75860,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core12 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -76019,7 +75898,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core12.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core13.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -76053,7 +75932,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core12.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core13.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -76103,7 +75982,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core12.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core13.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -76114,7 +75993,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core12.debug("Unable to validate download, no Content-Length header"); + core13.debug("Unable to validate download, no Content-Length header"); } }); } @@ -76234,7 +76113,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core12.debug("Unable to determine content length, downloading file with http-client..."); + core13.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -76314,7 +76193,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core12 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -76334,9 +76213,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core12.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core12.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core12.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core13.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core13.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -76373,12 +76252,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core12.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core12.debug(`Download concurrency: ${result.downloadConcurrency}`); - core12.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core12.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core12.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core12.debug(`Lookup only: ${result.lookupOnly}`); + core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core13.debug(`Download concurrency: ${result.downloadConcurrency}`); + core13.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core13.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core13.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core13.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -76558,7 +76437,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core12 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs5 = __importStar4(require("fs")); @@ -76576,7 +76455,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core12.debug(`Resource Url: ${url}`); + core13.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -76604,7 +76483,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core12.isDebug()) { + if (core13.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -76617,9 +76496,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core12.setSecret(cacheDownloadUrl); - core12.debug(`Cache Result:`); - core12.debug(JSON.stringify(cacheResult)); + core13.setSecret(cacheDownloadUrl); + core13.debug(`Cache Result:`); + core13.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -76634,10 +76513,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core12.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core13.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core12.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core13.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -76682,7 +76561,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core12.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core13.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -76704,7 +76583,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core12.debug("Awaiting all uploads"); + core13.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -76747,16 +76626,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core12.debug("Upload cache"); + core13.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core12.debug("Commiting cache"); + core13.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core13.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core12.info("Cache saved successfully"); + core13.info("Cache saved successfully"); } }); } @@ -82182,7 +82061,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core12 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var path5 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -82235,7 +82114,7 @@ var require_cache3 = __commonJS({ function restoreCache3(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core12.debug(`Cache service version: ${cacheServiceVersion}`); + core13.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -82251,8 +82130,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core12.debug("Resolved Keys:"); - core12.debug(JSON.stringify(keys)); + core13.debug("Resolved Keys:"); + core13.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -82270,19 +82149,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core12.info("Lookup only - skipping download"); + core13.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path5.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive Path: ${archivePath}`); + core13.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core12.isDebug()) { + if (core13.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core12.info("Cache restored successfully"); + core13.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -82290,16 +82169,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to restore: ${error2.message}`); + core13.error(`Failed to restore: ${error2.message}`); } else { - core12.warning(`Failed to restore: ${error2.message}`); + core13.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core12.debug(`Failed to delete archive: ${error2}`); + core13.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -82310,8 +82189,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core12.debug("Resolved Keys:"); - core12.debug(JSON.stringify(keys)); + core13.debug("Resolved Keys:"); + core13.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -82329,30 +82208,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core12.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core13.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core12.info(`Cache hit for restore-key: ${response.matchedKey}`); + core13.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core12.info(`Cache hit for: ${response.matchedKey}`); + core13.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core12.info("Lookup only - skipping download"); + core13.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path5.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive path: ${archivePath}`); - core12.debug(`Starting download of archive to: ${archivePath}`); + core13.debug(`Archive path: ${archivePath}`); + core13.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core12.isDebug()) { + core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core13.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core12.info("Cache restored successfully"); + core13.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -82360,9 +82239,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to restore: ${error2.message}`); + core13.error(`Failed to restore: ${error2.message}`); } else { - core12.warning(`Failed to restore: ${error2.message}`); + core13.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -82371,7 +82250,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core12.debug(`Failed to delete archive: ${error2}`); + core13.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -82380,7 +82259,7 @@ var require_cache3 = __commonJS({ function saveCache3(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core12.debug(`Cache service version: ${cacheServiceVersion}`); + core13.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -82399,26 +82278,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core12.debug("Cache Paths:"); - core12.debug(`${JSON.stringify(cachePaths)}`); + core13.debug("Cache Paths:"); + core13.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path5.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive Path: ${archivePath}`); + core13.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core12.isDebug()) { + if (core13.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.debug(`File Size: ${archiveFileSize}`); + core13.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core12.debug("Reserving Cache"); + core13.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -82431,26 +82310,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core12.debug(`Saving Cache (ID: ${cacheId})`); + core13.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core12.info(`Failed to save: ${typedError.message}`); + core13.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to save: ${typedError.message}`); + core13.error(`Failed to save: ${typedError.message}`); } else { - core12.warning(`Failed to save: ${typedError.message}`); + core13.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core12.debug(`Failed to delete archive: ${error2}`); + core13.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -82463,26 +82342,26 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core12.debug("Cache Paths:"); - core12.debug(`${JSON.stringify(cachePaths)}`); + core13.debug("Cache Paths:"); + core13.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path5.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive Path: ${archivePath}`); + core13.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core12.isDebug()) { + if (core13.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.debug(`File Size: ${archiveFileSize}`); + core13.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } options.archiveSizeBytes = archiveFileSize; - core12.debug("Reserving Cache"); + core13.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -82496,10 +82375,10 @@ var require_cache3 = __commonJS({ } signedUploadUrl = response.signedUploadUrl; } catch (error2) { - core12.debug(`Failed to reserve cache: 
${error2}`); + core13.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core12.debug(`Attempting to upload cache located at: ${archivePath}`); + core13.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -82507,7 +82386,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core12.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core13.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); } @@ -82517,19 +82396,19 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core12.info(`Failed to save: ${typedError.message}`); + core13.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to save: ${typedError.message}`); + core13.error(`Failed to save: ${typedError.message}`); } else { - core12.warning(`Failed to save: ${typedError.message}`); + core13.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core12.debug(`Failed to delete archive: ${error2}`); + core13.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -82737,7 +82616,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core12 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -82760,10 +82639,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core12.info(err.message); + core13.info(err.message); } const seconds = this.getSleepAmount(); - core12.info(`Waiting ${seconds} seconds before trying again`); + core13.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -82843,7 +82722,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core12 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var io5 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); var fs5 = __importStar4(require("fs")); @@ -82872,8 +82751,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || path5.join(_getTempDirectory(), crypto.randomUUID()); yield io5.mkdirP(path5.dirname(dest)); - core12.debug(`Downloading ${url}`); - core12.debug(`Destination ${dest}`); + core13.debug(`Downloading 
${url}`); + core13.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -82900,7 +82779,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core12.debug("set auth"); + core13.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -82909,7 +82788,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError(response.message.statusCode); - core12.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core13.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream.pipeline); @@ -82918,16 +82797,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs5.createWriteStream(dest)); - core12.debug("download complete"); + core13.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core12.debug("download failed"); + core13.debug("download failed"); try { yield io5.rmRF(dest); } catch (err) { - core12.debug(`Failed to delete '${dest}'. ${err.message}`); + core13.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -82942,7 +82821,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core12.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core13.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -82992,7 +82871,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core12.debug("Checking tar --version"); + core13.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -83002,7 +82881,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core12.debug(versionOutput.trim()); + core13.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -83010,7 +82889,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core12.isDebug() && !flags.includes("v")) { + if (core13.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -83042,7 +82921,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core12.isDebug()) { + if (core13.isDebug()) { args.push("-v"); } const xarPath = yield io5.which("xar", true); @@ -83087,7 +82966,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core12.debug(`Using pwsh at path: ${pwshPath}`); + core13.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -83107,7 +82986,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io5.which("powershell", true); - core12.debug(`Using powershell at path: ${powershellPath}`); + core13.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -83116,7 +82995,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const 
unzipPath = yield io5.which("unzip", true); const args = [file]; - if (!core12.isDebug()) { + if (!core13.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -83127,8 +83006,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch = arch || os2.arch(); - core12.debug(`Caching tool ${tool} ${version} ${arch}`); - core12.debug(`source dir: ${sourceDir}`); + core13.debug(`Caching tool ${tool} ${version} ${arch}`); + core13.debug(`source dir: ${sourceDir}`); if (!fs5.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -83146,14 +83025,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch = arch || os2.arch(); - core12.debug(`Caching tool ${tool} ${version} ${arch}`); - core12.debug(`source file: ${sourceFile}`); + core13.debug(`Caching tool ${tool} ${version} ${arch}`); + core13.debug(`source file: ${sourceFile}`); if (!fs5.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch); const destPath = path5.join(destFolder, targetFile); - core12.debug(`destination file ${destPath}`); + core13.debug(`destination file ${destPath}`); yield io5.cp(sourceFile, destPath); _completeToolPath(tool, version, arch); return destFolder; @@ -83177,12 +83056,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path5.join(_getCacheDirectory(), toolName, versionSpec, arch); - core12.debug(`checking cache: ${cachePath}`); + core13.debug(`checking cache: ${cachePath}`); if (fs5.existsSync(cachePath) && fs5.existsSync(`${cachePath}.complete`)) { - core12.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); + core13.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); toolPath = cachePath; } else { - core12.debug("not found"); + core13.debug("not found"); } } return toolPath; @@ -83213,7 +83092,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core12.debug("set auth"); + core13.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -83234,7 +83113,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core12.debug("Invalid json"); + core13.debug("Invalid json"); } } return releases; @@ -83260,7 +83139,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path5.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); - core12.debug(`destination ${folderPath}`); + core13.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io5.rmRF(folderPath); yield io5.rmRF(markerPath); @@ -83272,19 +83151,19 @@ var require_tool_cache = __commonJS({ const folderPath = path5.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); const markerPath = `${folderPath}.complete`; fs5.writeFileSync(markerPath, ""); - core12.debug("finished caching tool"); + core13.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - core12.debug(`isExplicit: ${c}`); + core13.debug(`isExplicit: ${c}`); const valid3 = 
semver8.valid(c) != null;
-      core12.debug(`explicit? ${valid3}`);
+      core13.debug(`explicit? ${valid3}`);
       return valid3;
     }
     exports2.isExplicitVersion = isExplicitVersion;
     function evaluateVersions(versions, versionSpec) {
       let version = "";
-      core12.debug(`evaluating ${versions.length} versions`);
+      core13.debug(`evaluating ${versions.length} versions`);
       versions = versions.sort((a, b) => {
         if (semver8.gt(a, b)) {
           return 1;
@@ -83300,9 +83179,9 @@ var require_tool_cache = __commonJS({
         }
       }
       if (version) {
-        core12.debug(`matched: ${version}`);
+        core13.debug(`matched: ${version}`);
       } else {
-        core12.debug("match not found");
+        core13.debug("match not found");
       }
       return version;
     }
@@ -83879,18 +83758,18 @@ var require_follow_redirects = __commonJS({
 });
 
 // src/resolve-environment-action.ts
-var core11 = __toESM(require_core());
+var core12 = __toESM(require_core());
 
 // src/actions-util.ts
 var fs = __toESM(require("fs"));
-var core3 = __toESM(require_core());
+var core4 = __toESM(require_core());
 var toolrunner = __toESM(require_toolrunner());
 var github = __toESM(require_github());
 var io2 = __toESM(require_io());
 
 // src/util.ts
 var path = __toESM(require("path"));
-var core2 = __toESM(require_core());
+var core3 = __toESM(require_core());
 var exec = __toESM(require_exec());
 var io = __toESM(require_io());
@@ -84031,7 +83910,61 @@ function checkDiskSpace(directoryPath, dependencies = {
 
 // src/util.ts
 var import_del = __toESM(require_del());
-var import_get_folder_size = __toESM(require_get_folder_size());
+
+// node_modules/get-folder-size/index.js
+var import_node_path2 = require("node:path");
+async function getFolderSize(itemPath, options) {
+  return await core(itemPath, options, { errors: true });
+}
+getFolderSize.loose = async (itemPath, options) => await core(itemPath, options);
+getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true });
+async function core(rootItemPath, options = {}, returnType = {}) {
+  const fs5 = options.fs || await import("node:fs/promises");
+  let folderSize = 0n;
+  const foundInos = /* @__PURE__ */ new Set();
+  const errors = [];
+  await processItem(rootItemPath);
+  async function processItem(itemPath) {
+    if (options.ignore?.test(itemPath)) return;
+    const stats = returnType.strict ? await fs5.lstat(itemPath, { bigint: true }) : await fs5.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2));
+    if (typeof stats !== "object") return;
+    if (!foundInos.has(stats.ino)) {
+      foundInos.add(stats.ino);
+      folderSize += stats.size;
+    }
+    if (stats.isDirectory()) {
+      const directoryItems = returnType.strict ? await fs5.readdir(itemPath) : await fs5.readdir(itemPath).catch((error2) => errors.push(error2));
+      if (typeof directoryItems !== "object") return;
+      await Promise.all(
+        directoryItems.map(
+          (directoryItem) => processItem((0, import_node_path2.join)(itemPath, directoryItem))
+        )
+      );
+    }
+  }
+  if (!options.bigint) {
+    if (folderSize > BigInt(Number.MAX_SAFE_INTEGER)) {
+      const error2 = new RangeError(
+        "The folder size is too large to return as a Number. You can instruct this package to return a BigInt instead."
+      );
+      if (returnType.strict) {
+        throw error2;
+      }
+      errors.push(error2);
+      folderSize = Number.MAX_SAFE_INTEGER;
+    } else {
+      folderSize = Number(folderSize);
+    }
+  }
+  if (returnType.errors) {
+    return {
+      size: folderSize,
+      errors: errors.length > 0 ? errors : null
+    };
+  } else {
+    return folderSize;
+  }
+}
 
 // node_modules/js-yaml/dist/js-yaml.mjs
 function isNothing(subject) {
   return typeof subject === "undefined" || subject === null;
 }
@@ -84602,7 +84535,7 @@ var json = failsafe.extend({
     float
   ]
 });
-var core = json;
+var core2 = json;
 var YAML_DATE_REGEXP = new RegExp(
   "^([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])$"
 );
@@ -84816,7 +84749,7 @@ var set = new type("tag:yaml.org,2002:set", {
   resolve: resolveYamlSet,
   construct: constructYamlSet
 });
-var _default = core.extend({
+var _default = core2.extend({
   implicit: [
     timestamp,
     merge
@@ -86694,7 +86627,7 @@ function checkGitHubVersionInRange(version, logger) {
     );
   }
   hasBeenWarnedAboutVersion = true;
-  core2.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true);
+  core3.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true);
 }
 function apiVersionInRange(version, minimumVersion2, maximumVersion2) {
   if (!semver.satisfies(version, `>=${minimumVersion2}`)) {
@@ -86755,7 +86688,7 @@ function getTestingEnvironment() {
 var hadTimeout = false;
 async function checkForTimeout() {
   if (hadTimeout === true) {
-    core2.info(
+    core3.info(
       "A timeout occurred, force exiting the process after 30 seconds to prevent hanging."
     );
     await delay(3e4, { allowProcessExit: true });
@@ -86785,7 +86718,7 @@ async function checkDiskUsage(logger) {
   } else {
     logger.debug(message);
   }
-  core2.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
+  core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true");
 }
 return {
   numAvailableBytes: diskUsage.free,
@@ -86805,10 +86738,10 @@ function checkActionVersion(version, githubVersion) {
     semver.coerce(githubVersion.version) ?? "0.0.0",
     ">=3.11"
   )) {
-    core2.error(
+    core3.error(
       "CodeQL Action major versions v1 and v2 have been deprecated. Please update all occurrences of the CodeQL Action in your workflow files to v3. For more information, see https://github.blog/changelog/2025-01-10-code-scanning-codeql-action-v2-is-now-deprecated/"
     );
-    core2.exportVariable("CODEQL_ACTION_DID_LOG_VERSION_DEPRECATION" /* LOG_VERSION_DEPRECATION */, "true");
+    core3.exportVariable("CODEQL_ACTION_DID_LOG_VERSION_DEPRECATION" /* LOG_VERSION_DEPRECATION */, "true");
   }
 }
 }
@@ -86825,13 +86758,13 @@ async function checkSipEnablement(logger) {
   if (sipStatusOutput.stdout.includes(
     "System Integrity Protection status: enabled."
   )) {
-    core2.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
+    core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true");
     return true;
   }
   if (sipStatusOutput.stdout.includes(
     "System Integrity Protection status: disabled."
   )) {
-    core2.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
+    core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false");
     return false;
   }
 }
@@ -86851,14 +86784,14 @@ async function asyncSome(array, predicate) {
 
 // src/actions-util.ts
 var pkg = require_package();
 var getRequiredInput = function(name) {
-  const value = core3.getInput(name);
+  const value = core4.getInput(name);
   if (!value) {
     throw new ConfigurationError(`Input required and not supplied: ${name}`);
   }
   return value;
 };
 var getOptionalInput = function(name) {
-  const value = core3.getInput(name);
+  const value = core4.getInput(name);
   return value.length > 0 ? 
value : void 0; }; function getTemporaryDirectory() { @@ -86972,7 +86905,7 @@ async function runTool(cmd, args = [], opts = {}) { } // src/api-client.ts -var core4 = __toESM(require_core()); +var core5 = __toESM(require_core()); var githubUtils = __toESM(require_utils4()); var retry = __toESM(require_dist_node15()); var import_console_log_level = __toESM(require_console_log_level()); @@ -87077,7 +87010,7 @@ async function getAnalysisKey() { const workflowPath = await getWorkflowRelativePath(); const jobName = getRequiredEnvParam("GITHUB_JOB"); analysisKey = `${workflowPath}:${jobName}`; - core4.exportVariable(analysisKeyEnvVar, analysisKey); + core5.exportVariable(analysisKeyEnvVar, analysisKey); return analysisKey; } @@ -87332,7 +87265,7 @@ var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { var supportedAnalysisKinds = new Set(Object.values(AnalysisKind)); // src/caching-utils.ts -var core5 = __toESM(require_core()); +var core6 = __toESM(require_core()); // src/feature-flags.ts var semver3 = __toESM(require_semver2()); @@ -87343,13 +87276,13 @@ var path2 = __toESM(require("path")); var actionsCache = __toESM(require_cache3()); // src/git-utils.ts -var core6 = __toESM(require_core()); +var core7 = __toESM(require_core()); var toolrunner2 = __toESM(require_toolrunner()); var io3 = __toESM(require_io()); var runGitCommand = async function(workingDirectory, args, customErrorMessage) { let stdout = ""; let stderr = ""; - core6.debug(`Running git command: git ${args.join(" ")}`); + core7.debug(`Running git command: git ${args.join(" ")}`); try { await new toolrunner2.ToolRunner(await io3.which("git", true), args, { silent: true, @@ -87369,7 +87302,7 @@ var runGitCommand = async function(workingDirectory, args, customErrorMessage) { if (stderr.includes("not a git repository")) { reason = "The checkout path provided to the action does not appear to be a git repository."; } - core6.info(`git call failed. ${customErrorMessage} Error: ${reason}`); + core7.info(`git call failed. 
${customErrorMessage} Error: ${reason}`); throw error2; } }; @@ -87480,7 +87413,7 @@ async function getRef() { ) !== head; if (hasChangedRef) { const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head"); - core6.debug( + core7.debug( `No longer on merge commit, rewriting ref from ${ref} to ${newRef}.` ); return newRef; @@ -87506,9 +87439,9 @@ async function isAnalyzingDefaultBranch() { } // src/logging.ts -var core7 = __toESM(require_core()); +var core8 = __toESM(require_core()); function getActionsLogger() { - return core7; + return core8; } // src/overlay-database-utils.ts @@ -87845,7 +87778,7 @@ function generateCodeScanningConfig(originalUserInput, augmentationProperties) { // src/codeql.ts var fs4 = __toESM(require("fs")); var path4 = __toESM(require("path")); -var core9 = __toESM(require_core()); +var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); // src/setup-codeql.ts @@ -87860,7 +87793,7 @@ var toolcache = __toESM(require_tool_cache()); var semver5 = __toESM(require_semver2()); // src/tools-download.ts -var core8 = __toESM(require_core()); +var core9 = __toESM(require_core()); var import_http_client = __toESM(require_lib()); var toolcache2 = __toESM(require_tool_cache()); var import_follow_redirects = __toESM(require_follow_redirects()); @@ -88319,12 +88252,12 @@ ${output}` ); } else if (checkVersion && process.env["CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */] !== "true" && !await codeQlVersionAtLeast(codeql, CODEQL_NEXT_MINIMUM_VERSION)) { const result = await codeql.getVersion(); - core9.warning( + core10.warning( `CodeQL CLI version ${result.version} was discontinued on ${GHES_MOST_RECENT_DEPRECATION_DATE} alongside GitHub Enterprise Server ${GHES_VERSION_MOST_RECENTLY_DEPRECATED} and will not be supported by the next minor release of the CodeQL Action. Please update to CodeQL CLI version ${CODEQL_NEXT_MINIMUM_VERSION} or later. For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version. Alternatively, if you want to continue using CodeQL CLI version ${result.version}, you can replace 'github/codeql-action/*@v${getActionVersion().split(".")[0]}' by 'github/codeql-action/*@v${getActionVersion()}' in your code scanning workflow to continue using this version of the CodeQL Action.` ); - core9.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); + core10.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); } return codeql; } @@ -88443,7 +88376,7 @@ async function runResolveBuildEnvironment(cmd, logger, workingDir, language) { // src/status-report.ts var os = __toESM(require("os")); -var core10 = __toESM(require_core()); +var core11 = __toESM(require_core()); function isFirstPartyAnalysis(actionName) { if (actionName !== "upload-sarif" /* UploadSarif */) { return true; @@ -88459,12 +88392,12 @@ function getActionsStatus(error2, otherFailureCause) { } function setJobStatusIfUnsuccessful(actionStatus) { if (actionStatus === "user-error") { - core10.exportVariable( + core11.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); } else if (actionStatus === "failure" || actionStatus === "aborted") { - core10.exportVariable( + core11.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_FAILURE" /* FailureStatus */ ); @@ -88483,14 +88416,14 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; if (workflowStartedAt === void 0) { workflowStartedAt = actionStartedAt.toISOString(); - core10.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + core11.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); } const runnerOs = getRequiredEnvParam("RUNNER_OS"); const codeQlCliVersion = getCachedCodeQlVersion(); const actionRef = process.env["GITHUB_ACTION_REF"] || ""; const testingEnvironment = getTestingEnvironment(); if (testingEnvironment) { - core10.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + core11.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); } const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; const statusReport = { @@ -88568,9 +88501,9 @@ var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scan async function sendStatusReport(statusReport) { setJobStatusIfUnsuccessful(statusReport.status); const statusReportJSON = JSON.stringify(statusReport); - core10.debug(`Sending status report: ${statusReportJSON}`); + core11.debug(`Sending status report: ${statusReportJSON}`); if (isInTestMode()) { - core10.debug("In test mode. Status reports are not uploaded."); + core11.debug("In test mode. Status reports are not uploaded."); return; } const nwo = getRepositoryNwo(); @@ -88589,26 +88522,26 @@ async function sendStatusReport(statusReport) { switch (e.status) { case 403: if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core10.warning( + core11.warning( `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading Code Scanning results requires write access. To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. 
See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` ); } else { - core10.warning(e.message); + core11.warning(e.message); } return; case 404: - core10.warning(e.message); + core11.warning(e.message); return; case 422: if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core10.debug(INCOMPATIBLE_MSG); + core11.debug(INCOMPATIBLE_MSG); } else { - core10.debug(OUT_OF_DATE_MSG); + core11.debug(OUT_OF_DATE_MSG); } return; } } - core10.warning( + core11.warning( `An unexpected error occurred when sending code scanning status report: ${getErrorMessage( e )}` @@ -88650,16 +88583,16 @@ async function run() { workingDirectory, getRequiredInput("language") ); - core11.setOutput(ENVIRONMENT_OUTPUT_NAME, result); + core12.setOutput(ENVIRONMENT_OUTPUT_NAME, result); } catch (unwrappedError) { const error2 = wrapError(unwrappedError); if (error2 instanceof CliError) { - core11.setOutput(ENVIRONMENT_OUTPUT_NAME, {}); + core12.setOutput(ENVIRONMENT_OUTPUT_NAME, {}); logger.warning( `Failed to resolve a build environment suitable for automatically building your code. ${error2.message}` ); } else { - core11.setFailed( + core12.setFailed( `Failed to resolve a build environment suitable for automatically building your code. ${error2.message}` ); const statusReportBase2 = await createStatusReportBase( @@ -88694,7 +88627,7 @@ async function runWrapper() { try { await run(); } catch (error2) { - core11.setFailed( + core12.setFailed( `${"resolve-environment" /* ResolveEnvironment */} action failed: ${getErrorMessage( error2 )}` diff --git a/lib/start-proxy-action-post.js b/lib/start-proxy-action-post.js index 07be67504..8fdf4c705 100644 --- a/lib/start-proxy-action-post.js +++ b/lib/start-proxy-action-post.js @@ -34336,127 +34336,6 @@ var require_del = __commonJS({ } }); -// node_modules/tiny-each-async/index.js -var require_tiny_each_async = __commonJS({ - "node_modules/tiny-each-async/index.js"(exports2, module2) { - "use strict"; - module2.exports = function eachAsync(arr, parallelLimit, iteratorFn, cb) { - var pending = 0; - var index = 0; - var lastIndex = arr.length - 1; - var called = false; - var limit; - var callback; - var iterate; - if (typeof parallelLimit === "number") { - limit = parallelLimit; - iterate = iteratorFn; - callback = cb || function noop() { - }; - } else { - iterate = parallelLimit; - callback = iteratorFn || function noop() { - }; - limit = arr.length; - } - if (!arr.length) { - return callback(); - } - var iteratorLength = iterate.length; - var shouldCallNextIterator = function shouldCallNextIterator2() { - return !called && pending < limit && index < lastIndex; - }; - var iteratorCallback = function iteratorCallback2(err) { - if (called) { - return; - } - pending--; - if (err || index === lastIndex && !pending) { - called = true; - callback(err); - } else if (shouldCallNextIterator()) { - processIterator(++index); - } - }; - var processIterator = function processIterator2() { - pending++; - var args = iteratorLength === 2 ? 
[arr[index], iteratorCallback] : [arr[index], index, iteratorCallback]; - iterate.apply(null, args); - if (shouldCallNextIterator()) { - processIterator2(++index); - } - }; - processIterator(); - }; - } -}); - -// node_modules/get-folder-size/index.js -var require_get_folder_size = __commonJS({ - "node_modules/get-folder-size/index.js"(exports2, module2) { - "use strict"; - var fs2 = require("fs"); - var path2 = require("path"); - var eachAsync = require_tiny_each_async(); - function readSizeRecursive(seen, item, ignoreRegEx, callback) { - let cb; - let ignoreRegExp; - if (!callback) { - cb = ignoreRegEx; - ignoreRegExp = null; - } else { - cb = callback; - ignoreRegExp = ignoreRegEx; - } - fs2.lstat(item, function lstat(e, stats) { - let total = !e ? stats.size || 0 : 0; - if (stats) { - if (seen.has(stats.ino)) { - return cb(null, 0); - } - seen.add(stats.ino); - } - if (!e && stats.isDirectory()) { - fs2.readdir(item, (err, list) => { - if (err) { - return cb(err); - } - eachAsync( - list, - 5e3, - (dirItem, next) => { - readSizeRecursive( - seen, - path2.join(item, dirItem), - ignoreRegExp, - (error2, size) => { - if (!error2) { - total += size; - } - next(error2); - } - ); - }, - (finalErr) => { - cb(finalErr, total); - } - ); - }); - } else { - if (ignoreRegExp && ignoreRegExp.test(item)) { - total = 0; - } - cb(e, total); - } - }); - } - module2.exports = (...args) => { - args.unshift(/* @__PURE__ */ new Set()); - return readSizeRecursive(...args); - }; - } -}); - // node_modules/semver/internal/constants.js var require_constants9 = __commonJS({ "node_modules/semver/internal/constants.js"(exports2, module2) { @@ -36412,7 +36291,7 @@ var require_package = __commonJS({ "fast-deep-equal": "^3.1.3", "file-url": "^3.0.0", "follow-redirects": "^1.15.11", - "get-folder-size": "^2.0.1", + "get-folder-size": "^5.0.0", "js-yaml": "^4.1.0", jsonschema: "1.4.1", long: "^5.3.2", @@ -38076,7 +37955,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -38086,15 +37965,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core13.debug(`implicitDescendants '${result.implicitDescendants}'`); + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -38758,7 +38637,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs2 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path2 = 
__importStar4(require("path")); @@ -38809,7 +38688,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core13.debug(`Search path '${searchPath}'`); + core14.debug(`Search path '${searchPath}'`); try { yield __await4(fs2.promises.lstat(searchPath)); } catch (err) { @@ -38881,7 +38760,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core13.debug(`Broken symlink '${item.path}'`); + core14.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); @@ -38897,7 +38776,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -40221,7 +40100,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob2 = __importStar4(require_glob2()); var io6 = __importStar4(require_io()); @@ -40274,7 +40153,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path2.relative(workspace, file).replace(new RegExp(`\\${path2.sep}`, "g"), "/"); - core13.debug(`Matched: ${relativeFile}`); + core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -40304,7 +40183,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core13.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -40315,10 +40194,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core13.debug(err.message); + core14.debug(err.message); } versionOutput = versionOutput.trim(); - core13.debug(versionOutput); + core14.debug(versionOutput); return versionOutput; }); } @@ -40326,7 +40205,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core13.debug(`zstd version: ${version}`); + core14.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -75638,7 +75517,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -75680,7 +75559,7 @@ var 
require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -75735,14 +75614,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core13.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core13.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -75813,7 +75692,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -75874,9 +75753,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core13.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core13.debug(`${name} - Error is not retryable`); + core14.debug(`${name} - Error is not retryable`); break; } yield sleep(delay); @@ -75981,7 +75860,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -76019,7 +75898,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core13.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -76053,7 +75932,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -76103,7 +75982,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core13.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -76114,7 +75993,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core13.debug("Unable to validate download, no Content-Length header"); + core14.debug("Unable to validate download, no Content-Length header"); } }); } @@ -76234,7 +76113,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core13.debug("Unable to determine content length, downloading file with http-client..."); + core14.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -76314,7 +76193,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -76334,9 +76213,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core13.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -76373,12 +76252,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Download concurrency: ${result.downloadConcurrency}`); - core13.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core13.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core13.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core13.debug(`Lookup only: ${result.lookupOnly}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Download concurrency: ${result.downloadConcurrency}`); + core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core14.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -76558,7 +76437,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs2 = __importStar4(require("fs")); @@ -76576,7 +76455,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core13.debug(`Resource Url: ${url}`); + core14.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -76604,7 +76483,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core13.isDebug()) { + if (core14.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -76617,9 +76496,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core13.setSecret(cacheDownloadUrl); - core13.debug(`Cache Result:`); - core13.debug(JSON.stringify(cacheResult)); + core14.setSecret(cacheDownloadUrl); + core14.debug(`Cache Result:`); + core14.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -76634,10 +76513,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core13.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core13.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -76682,7 +76561,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core13.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -76704,7 +76583,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core13.debug("Awaiting all uploads"); + core14.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -76747,16 +76626,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core13.debug("Upload cache"); + core14.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core13.debug("Commiting cache"); + core14.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core13.info("Cache saved successfully"); + core14.info("Cache saved successfully"); } }); } @@ -82182,7 +82061,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var path2 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -82235,7 +82114,7 @@ var require_cache3 = __commonJS({ function restoreCache4(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -82251,8 +82130,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -82270,19 +82149,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path2.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -82290,16 +82169,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error2.message}`); + core14.error(`Failed to restore: ${error2.message}`); } else { - core13.warning(`Failed to restore: ${error2.message}`); + core14.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -82310,8 +82189,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -82329,30 +82208,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core13.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core14.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core13.info(`Cache hit for restore-key: ${response.matchedKey}`); + core14.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core13.info(`Cache hit for: ${response.matchedKey}`); + core14.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path2.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive path: ${archivePath}`); - core13.debug(`Starting download of archive to: ${archivePath}`); + core14.debug(`Archive path: ${archivePath}`); + core14.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core13.isDebug()) { + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -82360,9 +82239,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error2.message}`); + core14.error(`Failed to restore: ${error2.message}`); } else { - core13.warning(`Failed to restore: ${error2.message}`); + core14.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -82371,7 +82250,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -82380,7 +82259,7 @@ var require_cache3 = __commonJS({ function saveCache4(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -82399,26 +82278,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path2.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -82431,26 +82310,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core13.debug(`Saving Cache (ID: ${cacheId})`); + core14.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core13.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -82463,26 +82342,26 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path2.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } options.archiveSizeBytes = archiveFileSize; - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -82496,10 +82375,10 @@ var require_cache3 = __commonJS({ } signedUploadUrl = response.signedUploadUrl; } catch (error2) { - core13.debug(`Failed to reserve 
cache: ${error2}`); + core14.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core13.debug(`Attempting to upload cache located at: ${archivePath}`); + core14.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -82507,7 +82386,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core13.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); } @@ -82517,19 +82396,19 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core13.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -84636,14 +84515,14 @@ var require_retention = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getExpiration = void 0; var generated_1 = require_generated(); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getExpiration(retentionDays) { if (!retentionDays) { return void 0; } const maxRetentionDays = getRetentionDays(); if (maxRetentionDays && maxRetentionDays < retentionDays) { - core13.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. Using ${maxRetentionDays} instead.`); + core14.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. 
Using ${maxRetentionDays} instead.`); retentionDays = maxRetentionDays; } const expirationDate = /* @__PURE__ */ new Date(); @@ -85235,7 +85114,7 @@ var require_util11 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getBackendIdsFromToken = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var config_1 = require_config2(); var jwt_decode_1 = __importDefault4(require_jwt_decode_cjs()); var InvalidJwtError = new Error("Failed to get backend IDs: The provided JWT token is invalid and/or missing claims"); @@ -85261,8 +85140,8 @@ var require_util11 = __commonJS({ workflowRunBackendId: scopeParts[1], workflowJobRunBackendId: scopeParts[2] }; - core13.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`); - core13.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`); + core14.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`); + core14.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`); return ids; } throw InvalidJwtError; @@ -85333,7 +85212,7 @@ var require_blob_upload = __commonJS({ exports2.uploadZipToBlobStorage = void 0; var storage_blob_1 = require_dist7(); var config_1 = require_config2(); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var crypto = __importStar4(require("crypto")); var stream = __importStar4(require("stream")); var errors_1 = require_errors3(); @@ -85359,9 +85238,9 @@ var require_blob_upload = __commonJS({ const bufferSize = (0, config_1.getUploadChunkSize)(); const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL); const blockBlobClient = blobClient.getBlockBlobClient(); - core13.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`); + core14.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`); const uploadCallback = (progress) => { - core13.info(`Uploaded bytes ${progress.loadedBytes}`); + core14.info(`Uploaded bytes ${progress.loadedBytes}`); uploadByteCount = progress.loadedBytes; lastProgressTime = Date.now(); }; @@ -85375,7 +85254,7 @@ var require_blob_upload = __commonJS({ const hashStream = crypto.createHash("sha256"); zipUploadStream.pipe(uploadStream); zipUploadStream.pipe(hashStream).setEncoding("hex"); - core13.info("Beginning upload of artifact content to blob storage"); + core14.info("Beginning upload of artifact content to blob storage"); try { yield Promise.race([ blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options), @@ -85389,12 +85268,12 @@ var require_blob_upload = __commonJS({ } finally { abortController.abort(); } - core13.info("Finished uploading artifact content to blob storage!"); + core14.info("Finished uploading artifact content to blob storage!"); hashStream.end(); sha256Hash = hashStream.read(); - core13.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`); + core14.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`); if (uploadByteCount === 0) { - core13.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`); + core14.warning(`No data was uploaded to blob storage. 
Reported upload byte count is 0.`); } return { uploadSize: uploadByteCount, @@ -88450,7 +88329,7 @@ var require_BufferList = __commonJS({ this.head = this.tail = null; this.length = 0; }; - BufferList.prototype.join = function join2(s) { + BufferList.prototype.join = function join3(s) { if (this.length === 0) return ""; var p = this.head; var ret = "" + p.data; @@ -109453,7 +109332,7 @@ var require_zip2 = __commonJS({ var stream = __importStar4(require("stream")); var promises_1 = require("fs/promises"); var archiver2 = __importStar4(require_archiver()); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var config_1 = require_config2(); exports2.DEFAULT_COMPRESSION_LEVEL = 6; var ZipUploadStream = class extends stream.Transform { @@ -109470,7 +109349,7 @@ var require_zip2 = __commonJS({ exports2.ZipUploadStream = ZipUploadStream; function createZipUploadStream(uploadSpecification, compressionLevel = exports2.DEFAULT_COMPRESSION_LEVEL) { return __awaiter4(this, void 0, void 0, function* () { - core13.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`); + core14.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`); const zip = archiver2.create("zip", { highWaterMark: (0, config_1.getUploadChunkSize)(), zlib: { level: compressionLevel } @@ -109494,8 +109373,8 @@ var require_zip2 = __commonJS({ } const bufferSize = (0, config_1.getUploadChunkSize)(); const zipUploadStream = new ZipUploadStream(bufferSize); - core13.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`); - core13.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`); + core14.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`); + core14.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`); zip.pipe(zipUploadStream); zip.finalize(); return zipUploadStream; @@ -109503,24 +109382,24 @@ var require_zip2 = __commonJS({ } exports2.createZipUploadStream = createZipUploadStream; var zipErrorCallback = (error2) => { - core13.error("An error has occurred while creating the zip file for upload"); - core13.info(error2); + core14.error("An error has occurred while creating the zip file for upload"); + core14.info(error2); throw new Error("An error has occurred during zip creation for the artifact"); }; var zipWarningCallback = (error2) => { if (error2.code === "ENOENT") { - core13.warning("ENOENT warning during artifact zip creation. No such file or directory"); - core13.info(error2); + core14.warning("ENOENT warning during artifact zip creation. 
No such file or directory"); + core14.info(error2); } else { - core13.warning(`A non-blocking warning has occurred during artifact zip creation: ${error2.code}`); - core13.info(error2); + core14.warning(`A non-blocking warning has occurred during artifact zip creation: ${error2.code}`); + core14.info(error2); } }; var zipFinishCallback = () => { - core13.debug("Zip stream for upload has finished."); + core14.debug("Zip stream for upload has finished."); }; var zipEndCallback = () => { - core13.debug("Zip stream for upload has ended."); + core14.debug("Zip stream for upload has ended."); }; } }); @@ -109585,7 +109464,7 @@ var require_upload_artifact = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadArtifact = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var retention_1 = require_retention(); var path_and_artifact_name_validation_1 = require_path_and_artifact_name_validation(); var artifact_twirp_client_1 = require_artifact_twirp_client2(); @@ -109632,13 +109511,13 @@ var require_upload_artifact = __commonJS({ value: `sha256:${uploadResult.sha256Hash}` }); } - core13.info(`Finalizing artifact upload`); + core14.info(`Finalizing artifact upload`); const finalizeArtifactResp = yield artifactClient.FinalizeArtifact(finalizeArtifactReq); if (!finalizeArtifactResp.ok) { throw new errors_1.InvalidResponseError("FinalizeArtifact: response from backend was not ok"); } const artifactId = BigInt(finalizeArtifactResp.artifactId); - core13.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`); + core14.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`); return { size: uploadResult.uploadSize, digest: uploadResult.sha256Hash, @@ -116786,7 +116665,7 @@ var require_download_artifact = __commonJS({ var crypto = __importStar4(require("crypto")); var stream = __importStar4(require("stream")); var github2 = __importStar4(require_github2()); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var httpClient = __importStar4(require_lib()); var unzip_stream_1 = __importDefault4(require_unzip()); var user_agent_1 = require_user_agent2(); @@ -116822,7 +116701,7 @@ var require_download_artifact = __commonJS({ return yield streamExtractExternal(url, directory); } catch (error2) { retryCount++; - core13.debug(`Failed to download artifact after ${retryCount} retries due to ${error2.message}. Retrying in 5 seconds...`); + core14.debug(`Failed to download artifact after ${retryCount} retries due to ${error2.message}. 
Retrying in 5 seconds...`); yield new Promise((resolve2) => setTimeout(resolve2, 5e3)); } } @@ -116851,7 +116730,7 @@ var require_download_artifact = __commonJS({ extractStream.on("data", () => { timer.refresh(); }).on("error", (error2) => { - core13.debug(`response.message: Artifact download failed: ${error2.message}`); + core14.debug(`response.message: Artifact download failed: ${error2.message}`); clearTimeout(timer); reject(error2); }).pipe(unzip_stream_1.default.Extract({ path: directory })).on("close", () => { @@ -116859,7 +116738,7 @@ var require_download_artifact = __commonJS({ if (hashStream) { hashStream.end(); sha256Digest = hashStream.read(); - core13.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`); + core14.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`); } resolve2({ sha256Digest: `sha256:${sha256Digest}` }); }).on("error", (error2) => { @@ -116874,7 +116753,7 @@ var require_download_artifact = __commonJS({ const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path); const api = github2.getOctokit(token); let digestMismatch = false; - core13.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`); + core14.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`); const { headers, status } = yield api.rest.actions.downloadArtifact({ owner: repositoryOwner, repo: repositoryName, @@ -116891,16 +116770,16 @@ var require_download_artifact = __commonJS({ if (!location) { throw new Error(`Unable to redirect to artifact download url`); } - core13.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`); + core14.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`); try { - core13.info(`Starting download of artifact to: ${downloadPath}`); + core14.info(`Starting download of artifact to: ${downloadPath}`); const extractResponse = yield streamExtract(location, downloadPath); - core13.info(`Artifact download completed successfully.`); + core14.info(`Artifact download completed successfully.`); if (options === null || options === void 0 ? void 0 : options.expectedHash) { if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) { digestMismatch = true; - core13.debug(`Computed digest: ${extractResponse.sha256Digest}`); - core13.debug(`Expected digest: ${options.expectedHash}`); + core14.debug(`Computed digest: ${extractResponse.sha256Digest}`); + core14.debug(`Expected digest: ${options.expectedHash}`); } } } catch (error2) { @@ -116927,7 +116806,7 @@ var require_download_artifact = __commonJS({ Are you trying to download from a different run? Try specifying a github-token with \`actions:read\` scope.`); } if (artifacts.length > 1) { - core13.warning("Multiple artifacts found, defaulting to first."); + core14.warning("Multiple artifacts found, defaulting to first."); } const signedReq = { workflowRunBackendId: artifacts[0].workflowRunBackendId, @@ -116935,16 +116814,16 @@ Are you trying to download from a different run? 
Try specifying a github-token w name: artifacts[0].name }; const { signedUrl } = yield artifactClient.GetSignedArtifactURL(signedReq); - core13.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`); + core14.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`); try { - core13.info(`Starting download of artifact to: ${downloadPath}`); + core14.info(`Starting download of artifact to: ${downloadPath}`); const extractResponse = yield streamExtract(signedUrl, downloadPath); - core13.info(`Artifact download completed successfully.`); + core14.info(`Artifact download completed successfully.`); if (options === null || options === void 0 ? void 0 : options.expectedHash) { if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) { digestMismatch = true; - core13.debug(`Computed digest: ${extractResponse.sha256Digest}`); - core13.debug(`Expected digest: ${options.expectedHash}`); + core14.debug(`Computed digest: ${extractResponse.sha256Digest}`); + core14.debug(`Expected digest: ${options.expectedHash}`); } } } catch (error2) { @@ -116957,10 +116836,10 @@ Are you trying to download from a different run? Try specifying a github-token w function resolveOrCreateDirectory(downloadPath = (0, config_1.getGitHubWorkspaceDir)()) { return __awaiter4(this, void 0, void 0, function* () { if (!(yield exists(downloadPath))) { - core13.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`); + core14.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`); yield promises_1.default.mkdir(downloadPath, { recursive: true }); } else { - core13.debug(`Artifact destination folder already exists: ${downloadPath}`); + core14.debug(`Artifact destination folder already exists: ${downloadPath}`); } return downloadPath; }); @@ -117001,7 +116880,7 @@ var require_retry_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRetryOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var defaultMaxRetryNumber = 5; var defaultExemptStatusCodes = [400, 401, 403, 404, 422]; function getRetryOptions(defaultOptions, retries = defaultMaxRetryNumber, exemptStatusCodes = defaultExemptStatusCodes) { @@ -117016,7 +116895,7 @@ var require_retry_options = __commonJS({ retryOptions.doNotRetry = exemptStatusCodes; } const requestOptions = Object.assign(Object.assign({}, defaultOptions.request), { retries }); - core13.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? _a : "octokit default: [400, 401, 403, 404, 422]"})`); + core14.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? 
_a : "octokit default: [400, 401, 403, 404, 422]"})`); return [retryOptions, requestOptions]; } exports2.getRetryOptions = getRetryOptions; @@ -117173,7 +117052,7 @@ var require_get_artifact = __commonJS({ exports2.getArtifactInternal = exports2.getArtifactPublic = void 0; var github_1 = require_github2(); var plugin_retry_1 = require_dist_node25(); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var utils_1 = require_utils13(); var retry_options_1 = require_retry_options(); var plugin_request_log_1 = require_dist_node24(); @@ -117211,7 +117090,7 @@ var require_get_artifact = __commonJS({ let artifact2 = getArtifactResp.data.artifacts[0]; if (getArtifactResp.data.artifacts.length > 1) { artifact2 = getArtifactResp.data.artifacts.sort((a, b) => b.id - a.id)[0]; - core13.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`); + core14.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`); } return { artifact: { @@ -117244,7 +117123,7 @@ var require_get_artifact = __commonJS({ let artifact2 = res.artifacts[0]; if (res.artifacts.length > 1) { artifact2 = res.artifacts.sort((a, b) => Number(b.databaseId) - Number(a.databaseId))[0]; - core13.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.databaseId})`); + core14.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.databaseId})`); } return { artifact: { @@ -119192,7 +119071,7 @@ var require_requestUtils2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientRequest = exports2.retry = void 0; var utils_1 = require_utils14(); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var config_variables_1 = require_config_variables(); function retry3(name, operation, customErrorMessages, maxAttempts) { return __awaiter4(this, void 0, void 0, function* () { @@ -119219,13 +119098,13 @@ var require_requestUtils2 = __commonJS({ errorMessage = error2.message; } if (!isRetryable) { - core13.info(`${name} - Error is not retryable`); + core14.info(`${name} - Error is not retryable`); if (response) { (0, utils_1.displayHttpDiagnostics)(response); } break; } - core13.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core14.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); yield (0, utils_1.sleep)((0, utils_1.getExponentialRetryTimeInMilliseconds)(attempt)); attempt++; } @@ -119309,7 +119188,7 @@ var require_upload_http_client = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadHttpClient = void 0; var fs2 = __importStar4(require("fs")); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var tmp = __importStar4(require_tmp_promise()); var stream = __importStar4(require("stream")); var utils_1 = require_utils14(); @@ -119374,7 +119253,7 @@ var require_upload_http_client = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const FILE_CONCURRENCY = (0, config_variables_1.getUploadFileConcurrency)(); const MAX_CHUNK_SIZE = (0, config_variables_1.getUploadChunkSize)(); - core13.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); + core14.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); const parameters = []; let continueOnError = true; 
if (options) { @@ -119411,15 +119290,15 @@ var require_upload_http_client = __commonJS({ } const startTime = perf_hooks_1.performance.now(); const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters); - if (core13.isDebug()) { - core13.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); + if (core14.isDebug()) { + core14.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); } uploadFileSize += uploadFileResult.successfulUploadSize; totalFileSize += uploadFileResult.totalSize; if (uploadFileResult.isSuccess === false) { failedItemsToReport.push(currentFileParameters.file); if (!continueOnError) { - core13.error(`aborting artifact upload`); + core14.error(`aborting artifact upload`); abortPendingFileUploads = true; } } @@ -119428,7 +119307,7 @@ var require_upload_http_client = __commonJS({ }))); this.statusReporter.stop(); this.uploadHttpManager.disposeAndReplaceAllClients(); - core13.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); + core14.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); return { uploadSize: uploadFileSize, totalSize: totalFileSize, @@ -119454,16 +119333,16 @@ var require_upload_http_client = __commonJS({ let uploadFileSize = 0; let isGzip = true; if (!isFIFO && totalFileSize < 65536) { - core13.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); + core14.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); const buffer = yield (0, upload_gzip_1.createGZipFileInBuffer)(parameters.file); let openUploadStream; if (totalFileSize < buffer.byteLength) { - core13.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + core14.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); openUploadStream = () => fs2.createReadStream(parameters.file); isGzip = false; uploadFileSize = totalFileSize; } else { - core13.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); + core14.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); openUploadStream = () => { const passThrough = new stream.PassThrough(); passThrough.end(buffer); @@ -119475,7 +119354,7 @@ var require_upload_http_client = __commonJS({ if (!result) { isUploadSuccessful = false; failedChunkSizes += uploadFileSize; - core13.warning(`Aborting upload for ${parameters.file} due to failure`); + core14.warning(`Aborting upload for ${parameters.file} due to failure`); } return { isSuccess: isUploadSuccessful, @@ -119484,16 +119363,16 @@ var require_upload_http_client = __commonJS({ }; } else { const tempFile = yield tmp.file(); - core13.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); + core14.debug(`${parameters.file} is greater than 64k in size. 
Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); uploadFileSize = yield (0, upload_gzip_1.createGZipFileOnDisk)(parameters.file, tempFile.path); let uploadFilePath = tempFile.path; if (!isFIFO && totalFileSize < uploadFileSize) { - core13.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + core14.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); uploadFileSize = totalFileSize; uploadFilePath = parameters.file; isGzip = false; } else { - core13.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); + core14.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); } let abortFileUpload = false; while (offset < uploadFileSize) { @@ -119513,7 +119392,7 @@ var require_upload_http_client = __commonJS({ if (!result) { isUploadSuccessful = false; failedChunkSizes += chunkSize; - core13.warning(`Aborting upload for ${parameters.file} due to failure`); + core14.warning(`Aborting upload for ${parameters.file} due to failure`); abortFileUpload = true; } else { if (uploadFileSize > 8388608) { @@ -119521,7 +119400,7 @@ var require_upload_http_client = __commonJS({ } } } - core13.debug(`deleting temporary gzip file ${tempFile.path}`); + core14.debug(`deleting temporary gzip file ${tempFile.path}`); yield tempFile.cleanup(); return { isSuccess: isUploadSuccessful, @@ -119560,7 +119439,7 @@ var require_upload_http_client = __commonJS({ if (response) { (0, utils_1.displayHttpDiagnostics)(response); } - core13.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); + core14.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); return true; } return false; @@ -119568,14 +119447,14 @@ var require_upload_http_client = __commonJS({ const backOff = (retryAfterValue) => __awaiter4(this, void 0, void 0, function* () { this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core13.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); + core14.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core13.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); + core14.info(`Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); yield (0, utils_1.sleep)(backoffTime); } - core13.info(`Finished backoff for retry #${retryCount}, continuing with upload`); + core14.info(`Finished backoff for retry #${retryCount}, continuing with upload`); return; }); while (retryCount <= retryLimit) { @@ -119583,7 +119462,7 @@ var require_upload_http_client = __commonJS({ try { response = yield uploadChunkRequest(); } catch (error2) { - core13.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); + core14.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); console.log(error2); if (incrementAndCheckRetryLimit()) { return false; @@ -119595,13 +119474,13 @@ var require_upload_http_client = __commonJS({ if ((0, utils_1.isSuccessStatusCode)(response.message.statusCode)) { return true; } else if ((0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core13.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); + core14.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); if (incrementAndCheckRetryLimit(response)) { return false; } (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { - core13.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); + core14.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); (0, utils_1.displayHttpDiagnostics)(response); return false; } @@ -119619,7 +119498,7 @@ var require_upload_http_client = __commonJS({ resourceUrl.searchParams.append("artifactName", artifactName); const parameters = { Size: size }; const data = JSON.stringify(parameters, null, 2); - core13.debug(`URL is ${resourceUrl.toString()}`); + core14.debug(`URL is ${resourceUrl.toString()}`); const client = this.uploadHttpManager.getClient(0); const headers = (0, utils_1.getUploadHeaders)("application/json", false); const customErrorMessages = /* @__PURE__ */ new Map([ @@ -119632,7 +119511,7 @@ var require_upload_http_client = __commonJS({ return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages); yield response.readBody(); - core13.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); + core14.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); }); } }; @@ -119701,7 +119580,7 @@ var require_download_http_client = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DownloadHttpClient = void 0; var fs2 = __importStar4(require("fs")); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var zlib = __importStar4(require("zlib")); var utils_1 = require_utils14(); var url_1 = require("url"); @@ -119755,11 +119634,11 @@ var require_download_http_client = __commonJS({ downloadSingleArtifact(downloadItems) { return __awaiter4(this, void 0, void 0, function* () { const DOWNLOAD_CONCURRENCY = (0, config_variables_1.getDownloadFileConcurrency)(); - core13.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); + core14.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]; let currentFile = 0; let downloadedFiles = 0; - 
core13.info(`Total number of files that will be downloaded: ${downloadItems.length}`); + core14.info(`Total number of files that will be downloaded: ${downloadItems.length}`); this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length); this.statusReporter.start(); yield Promise.all(parallelDownloads.map((index) => __awaiter4(this, void 0, void 0, function* () { @@ -119768,8 +119647,8 @@ var require_download_http_client = __commonJS({ currentFile += 1; const startTime = perf_hooks_1.performance.now(); yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath); - if (core13.isDebug()) { - core13.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); + if (core14.isDebug()) { + core14.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); } this.statusReporter.incrementProcessedCount(); } @@ -119807,19 +119686,19 @@ var require_download_http_client = __commonJS({ } else { this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core13.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); + core14.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core13.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); + core14.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); yield (0, utils_1.sleep)(backoffTime); } - core13.info(`Finished backoff for retry #${retryCount}, continuing with download`); + core14.info(`Finished backoff for retry #${retryCount}, continuing with download`); } }); const isAllBytesReceived = (expected, received) => { if (!expected || !received || process.env["ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION"]) { - core13.info("Skipping download validation."); + core14.info("Skipping download validation."); return true; } return parseInt(expected) === received; @@ -119840,7 +119719,7 @@ var require_download_http_client = __commonJS({ try { response = yield makeDownloadRequest(); } catch (error2) { - core13.info("An error occurred while attempting to download a file"); + core14.info("An error occurred while attempting to download a file"); console.log(error2); yield backOff(); continue; @@ -119860,7 +119739,7 @@ var require_download_http_client = __commonJS({ } } if (forceRetry || (0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core13.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); + core14.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); resetDestinationStream(downloadPath); (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? 
yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { @@ -119882,29 +119761,29 @@ var require_download_http_client = __commonJS({ if (isGzip) { const gunzip = zlib.createGunzip(); response.message.on("error", (error2) => { - core13.info(`An error occurred while attempting to read the response stream`); + core14.info(`An error occurred while attempting to read the response stream`); gunzip.close(); destinationStream.close(); reject(error2); }).pipe(gunzip).on("error", (error2) => { - core13.info(`An error occurred while attempting to decompress the response stream`); + core14.info(`An error occurred while attempting to decompress the response stream`); destinationStream.close(); reject(error2); }).pipe(destinationStream).on("close", () => { resolve2(); }).on("error", (error2) => { - core13.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core14.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error2); }); } else { response.message.on("error", (error2) => { - core13.info(`An error occurred while attempting to read the response stream`); + core14.info(`An error occurred while attempting to read the response stream`); destinationStream.close(); reject(error2); }).pipe(destinationStream).on("close", () => { resolve2(); }).on("error", (error2) => { - core13.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core14.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error2); }); } @@ -120043,7 +119922,7 @@ var require_artifact_client = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultArtifactClient = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var upload_specification_1 = require_upload_specification(); var upload_http_client_1 = require_upload_http_client(); var utils_1 = require_utils14(); @@ -120064,7 +119943,7 @@ var require_artifact_client = __commonJS({ */ uploadArtifact(name, files, rootDirectory, options) { return __awaiter4(this, void 0, void 0, function* () { - core13.info(`Starting artifact upload + core14.info(`Starting artifact upload For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`); (0, path_and_artifact_name_validation_1.checkArtifactName)(name); const uploadSpecification = (0, upload_specification_1.getUploadSpecification)(name, rootDirectory, files); @@ -120076,24 +119955,24 @@ For more detailed logs during the artifact upload process, enable step-debugging }; const uploadHttpClient = new upload_http_client_1.UploadHttpClient(); if (uploadSpecification.length === 0) { - core13.warning(`No files found that can be uploaded`); + core14.warning(`No files found that can be uploaded`); } else { const response = yield uploadHttpClient.createArtifactInFileContainer(name, options); if (!response.fileContainerResourceUrl) { - core13.debug(response.toString()); + core14.debug(response.toString()); throw new Error("No URL provided by the Artifact Service to upload an artifact to"); } - core13.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); - core13.info(`Container for artifact "${name}" successfully created. 
Starting upload of file(s)`); + core14.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); + core14.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`); const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options); - core13.info(`File upload process has finished. Finalizing the artifact upload`); + core14.info(`File upload process has finished. Finalizing the artifact upload`); yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name); if (uploadResult.failedItems.length > 0) { - core13.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); + core14.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); } else { - core13.info(`Artifact has been finalized. All files have been successfully uploaded!`); + core14.info(`Artifact has been finalized. All files have been successfully uploaded!`); } - core13.info(` + core14.info(` The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage @@ -120127,10 +120006,10 @@ Note: The size of downloaded zips can differ significantly from the reported siz path2 = (0, path_1.resolve)(path2); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path2, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); if (downloadSpecification.filesToDownload.length === 0) { - core13.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); + core14.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); - core13.info("Directory structure has been set up for the artifact"); + core14.info("Directory structure has been set up for the artifact"); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); } @@ -120146,7 +120025,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz const response = []; const artifacts = yield downloadHttpClient.listArtifacts(); if (artifacts.count === 0) { - core13.info("Unable to find any artifacts for the associated workflow"); + core14.info("Unable to find any artifacts for the associated workflow"); return response; } if (!path2) { @@ -120158,11 +120037,11 @@ Note: The size of downloaded zips can differ significantly from the reported siz while (downloadedArtifacts < artifacts.count) { const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; downloadedArtifacts += 1; - core13.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); + core14.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path2, true); 
if (downloadSpecification.filesToDownload.length === 0) { - core13.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); + core14.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); @@ -120394,7 +120273,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -120417,10 +120296,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core13.info(err.message); + core14.info(err.message); } const seconds = this.getSleepAmount(); - core13.info(`Waiting ${seconds} seconds before trying again`); + core14.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -120500,7 +120379,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var io6 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); var fs2 = __importStar4(require("fs")); @@ -120529,8 +120408,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || path2.join(_getTempDirectory(), crypto.randomUUID()); yield io6.mkdirP(path2.dirname(dest)); - core13.debug(`Downloading ${url}`); - core13.debug(`Destination ${dest}`); + core14.debug(`Downloading ${url}`); + core14.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -120557,7 +120436,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core13.debug("set auth"); + core14.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -120566,7 +120445,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError(response.message.statusCode); - core13.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core14.debug(`Failed to download from "${url}". 
Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream.pipeline); @@ -120575,16 +120454,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs2.createWriteStream(dest)); - core13.debug("download complete"); + core14.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core13.debug("download failed"); + core14.debug("download failed"); try { yield io6.rmRF(dest); } catch (err) { - core13.debug(`Failed to delete '${dest}'. ${err.message}`); + core14.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -120599,7 +120478,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core13.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core14.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -120649,7 +120528,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core13.debug("Checking tar --version"); + core14.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -120659,7 +120538,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core13.debug(versionOutput.trim()); + core14.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -120667,7 +120546,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core13.isDebug() && !flags.includes("v")) { + if (core14.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -120699,7 +120578,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core13.isDebug()) { + if (core14.isDebug()) { args.push("-v"); } const xarPath = yield io6.which("xar", true); @@ -120744,7 +120623,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core13.debug(`Using pwsh at path: ${pwshPath}`); + core14.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -120764,7 +120643,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io6.which("powershell", true); - core13.debug(`Using powershell at path: ${powershellPath}`); + core14.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -120773,7 +120652,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const unzipPath = yield io6.which("unzip", true); const args = [file]; - if (!core13.isDebug()) { + if (!core14.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -120784,8 +120663,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch = arch || os.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch}`); - core13.debug(`source dir: ${sourceDir}`); + core14.debug(`Caching tool ${tool} ${version} ${arch}`); + core14.debug(`source dir: ${sourceDir}`); if (!fs2.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -120803,14 +120682,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = 
semver8.clean(version) || version; arch = arch || os.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch}`); - core13.debug(`source file: ${sourceFile}`); + core14.debug(`Caching tool ${tool} ${version} ${arch}`); + core14.debug(`source file: ${sourceFile}`); if (!fs2.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch); const destPath = path2.join(destFolder, targetFile); - core13.debug(`destination file ${destPath}`); + core14.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch); return destFolder; @@ -120834,12 +120713,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path2.join(_getCacheDirectory(), toolName, versionSpec, arch); - core13.debug(`checking cache: ${cachePath}`); + core14.debug(`checking cache: ${cachePath}`); if (fs2.existsSync(cachePath) && fs2.existsSync(`${cachePath}.complete`)) { - core13.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); + core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); toolPath = cachePath; } else { - core13.debug("not found"); + core14.debug("not found"); } } return toolPath; @@ -120870,7 +120749,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core13.debug("set auth"); + core14.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -120891,7 +120770,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core13.debug("Invalid json"); + core14.debug("Invalid json"); } } return releases; @@ -120917,7 +120796,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path2.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); - core13.debug(`destination ${folderPath}`); + core14.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); yield io6.rmRF(markerPath); @@ -120929,19 +120808,19 @@ var require_tool_cache = __commonJS({ const folderPath = path2.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); const markerPath = `${folderPath}.complete`; fs2.writeFileSync(markerPath, ""); - core13.debug("finished caching tool"); + core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - core13.debug(`isExplicit: ${c}`); + core14.debug(`isExplicit: ${c}`); const valid3 = semver8.valid(c) != null; - core13.debug(`explicit? ${valid3}`); + core14.debug(`explicit? 
${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core13.debug(`evaluating ${versions.length} versions`); + core14.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver8.gt(a, b)) { return 1; @@ -120957,9 +120836,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core13.debug(`matched: ${version}`); + core14.debug(`matched: ${version}`); } else { - core13.debug("match not found"); + core14.debug("match not found"); } return version; } @@ -121568,7 +121447,7 @@ var require_internal_glob_options_helper2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -121580,23 +121459,23 @@ var require_internal_glob_options_helper2 = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core13.debug(`implicitDescendants '${result.implicitDescendants}'`); + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = copy.matchDirectories; - core13.debug(`matchDirectories '${result.matchDirectories}'`); + core14.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core13.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core14.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -122280,7 +122159,7 @@ var require_internal_globber2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs2 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper2()); var path2 = __importStar4(require("path")); @@ -122333,7 +122212,7 @@ var require_internal_globber2 = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core13.debug(`Search path '${searchPath}'`); + core14.debug(`Search path '${searchPath}'`); try { yield __await4(fs2.promises.lstat(searchPath)); } catch (err) { @@ -122408,7 +122287,7 @@ var require_internal_globber2 = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core13.debug(`Broken symlink '${item.path}'`); + core14.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`);
@@ -122424,7 +122303,7 @@ var require_internal_globber2 = __commonJS({
traversalChain.pop();
}
if (traversalChain.some((x) => x === realPath)) {
- core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
+ core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
return void 0;
}
traversalChain.push(realPath);
@@ -122517,7 +122396,7 @@ var require_internal_hash_files = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.hashFiles = void 0;
var crypto = __importStar4(require("crypto"));
- var core13 = __importStar4(require_core());
+ var core14 = __importStar4(require_core());
var fs2 = __importStar4(require("fs"));
var stream = __importStar4(require("stream"));
var util = __importStar4(require("util"));
@@ -122526,7 +122405,7 @@ var require_internal_hash_files = __commonJS({
var _a, e_1, _b, _c;
var _d;
return __awaiter4(this, void 0, void 0, function* () {
- const writeDelegate = verbose ? core13.info : core13.debug;
+ const writeDelegate = verbose ? core14.info : core14.debug;
let hasMatch = false;
const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd();
const result = crypto.createHash("sha256");
@@ -122633,20 +122512,74 @@ var require_glob4 = __commonJS({
});

// src/start-proxy-action-post.ts
-var core12 = __toESM(require_core());
+var core13 = __toESM(require_core());

// src/actions-util.ts
-var core3 = __toESM(require_core());
+var core4 = __toESM(require_core());
var toolrunner = __toESM(require_toolrunner());
var github = __toESM(require_github());
var io2 = __toESM(require_io());

// src/util.ts
-var core2 = __toESM(require_core());
+var core3 = __toESM(require_core());
var exec = __toESM(require_exec());
var io = __toESM(require_io());
var import_del = __toESM(require_del());
-var import_get_folder_size = __toESM(require_get_folder_size());
+
+// node_modules/get-folder-size/index.js
+var import_node_path = require("node:path");
+async function getFolderSize(itemPath, options) {
+  return await core(itemPath, options, { errors: true });
+}
+getFolderSize.loose = async (itemPath, options) => await core(itemPath, options);
+getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true });
+async function core(rootItemPath, options = {}, returnType = {}) {
+  const fs2 = options.fs || await import("node:fs/promises");
+  let folderSize = 0n;
+  const foundInos = /* @__PURE__ */ new Set();
+  const errors = [];
+  await processItem(rootItemPath);
+  async function processItem(itemPath) {
+    if (options.ignore?.test(itemPath)) return;
+    const stats = returnType.strict ? await fs2.lstat(itemPath, { bigint: true }) : await fs2.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2));
+    if (typeof stats !== "object") return;
+    if (!foundInos.has(stats.ino)) {
+      foundInos.add(stats.ino);
+      folderSize += stats.size;
+    }
+    if (stats.isDirectory()) {
+      const directoryItems = returnType.strict ? await fs2.readdir(itemPath) : await fs2.readdir(itemPath).catch((error2) => errors.push(error2));
+      if (typeof directoryItems !== "object") return;
+      await Promise.all(
+        directoryItems.map(
+          (directoryItem) => processItem((0, import_node_path.join)(itemPath, directoryItem))
+        )
+      );
+    }
+  }
+  if (!options.bigint) {
+    if (folderSize > BigInt(Number.MAX_SAFE_INTEGER)) {
+      const error2 = new RangeError(
+        "The folder size is too large to return as a Number. You can instruct this package to return a BigInt instead."
+      );
+      if (returnType.strict) {
+        throw error2;
+      }
+      errors.push(error2);
+      folderSize = Number.MAX_SAFE_INTEGER;
+    } else {
+      folderSize = Number(folderSize);
+    }
+  }
+  if (returnType.errors) {
+    return {
+      size: folderSize,
+      errors: errors.length > 0 ? errors : null
+    };
+  } else {
+    return folderSize;
+  }
+}

// node_modules/js-yaml/dist/js-yaml.mjs
function isNothing(subject) {
@@ -123217,7 +123150,7 @@ var json = failsafe.extend({
float
]
});
-var core = json;
+var core2 = json;
var YAML_DATE_REGEXP = new RegExp(
"^([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])$"
);
@@ -123431,7 +123364,7 @@ var set = new type("tag:yaml.org,2002:set", {
resolve: resolveYamlSet,
construct: constructYamlSet
});
-var _default = core.extend({
+var _default = core2.extend({
implicit: [
timestamp,
merge
@@ -125291,7 +125224,7 @@ function checkGitHubVersionInRange(version, logger) {
);
}
hasBeenWarnedAboutVersion = true;
- core2.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true);
+ core3.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true);
}
function apiVersionInRange(version, minimumVersion2, maximumVersion2) {
if (!semver.satisfies(version, `>=${minimumVersion2}`)) {
@@ -125321,7 +125254,7 @@ function getErrorMessage(error2) {
// src/actions-util.ts
var pkg = require_package();
var getRequiredInput = function(name) {
- const value = core3.getInput(name);
+ const value = core4.getInput(name);
if (!value) {
throw new ConfigurationError(`Input required and not supplied: ${name}`);
}
@@ -125336,7 +125269,7 @@ function getActionVersion() {
}
var persistedInputsKey = "persisted_inputs";
var restoreInputs = function() {
- const persistedInputs = core3.getState(persistedInputsKey);
+ const persistedInputs = core4.getState(persistedInputsKey);
if (persistedInputs) {
for (const [name, value] of JSON.parse(persistedInputs)) {
process.env[name] = value;
}
}
};

// src/api-client.ts
-var core4 = __toESM(require_core());
+var core5 = __toESM(require_core());
var githubUtils = __toESM(require_utils4());
var retry = __toESM(require_dist_node15());
var import_console_log_level = __toESM(require_console_log_level());
@@ -125410,7 +125343,7 @@ var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => {
var supportedAnalysisKinds = new Set(Object.values(AnalysisKind));

// src/caching-utils.ts
-var core5 = __toESM(require_core());
+var core6 = __toESM(require_core());

// src/feature-flags.ts
var semver3 = __toESM(require_semver2());
@@ -125419,14 +125352,14 @@ var semver3 = __toESM(require_semver2());
var actionsCache = __toESM(require_cache3());

// src/git-utils.ts
-var core6 = __toESM(require_core());
+var core7 = __toESM(require_core());
var toolrunner2 = __toESM(require_toolrunner());
var io3 = __toESM(require_io());

// src/logging.ts
-var core7 = __toESM(require_core());
+var core8 = __toESM(require_core());
function getActionsLogger() {
- return core7;
+ return core8;
}

// src/overlay-database-utils.ts
@@ -125658,7 +125591,7 @@ 
async function getConfig(tempDir, logger) { // src/debug-artifacts.ts var artifact = __toESM(require_artifact2()); var artifactLegacy = __toESM(require_artifact_client2()); -var core11 = __toESM(require_core()); +var core12 = __toESM(require_core()); var import_archiver = __toESM(require_archiver()); var import_del3 = __toESM(require_del()); @@ -125667,10 +125600,10 @@ var io5 = __toESM(require_io()); var import_del2 = __toESM(require_del()); // src/autobuild.ts -var core10 = __toESM(require_core()); +var core11 = __toESM(require_core()); // src/codeql.ts -var core9 = __toESM(require_core()); +var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); // src/cli-errors.ts @@ -125818,7 +125751,7 @@ var toolcache = __toESM(require_tool_cache()); var semver5 = __toESM(require_semver2()); // src/tools-download.ts -var core8 = __toESM(require_core()); +var core9 = __toESM(require_core()); var import_http_client = __toESM(require_lib()); var toolcache2 = __toESM(require_tool_cache()); var import_follow_redirects = __toESM(require_follow_redirects()); @@ -125849,7 +125782,7 @@ async function runWrapper() { const logger = getActionsLogger(); try { restoreInputs(); - const pid = core12.getState("proxy-process-pid"); + const pid = core13.getState("proxy-process-pid"); if (pid) { process.kill(Number(pid)); } @@ -125857,8 +125790,8 @@ async function runWrapper() { getTemporaryDirectory(), logger ); - if (config && config.debugMode || core12.isDebug()) { - const logFilePath = core12.getState("proxy-log-file"); + if (config && config.debugMode || core13.isDebug()) { + const logFilePath = core13.getState("proxy-log-file"); logger.info( "Debug mode is on. Uploading proxy log as Actions debugging artifact..." ); diff --git a/lib/start-proxy-action.js b/lib/start-proxy-action.js index d3d75cad1..27b534a82 100644 --- a/lib/start-proxy-action.js +++ b/lib/start-proxy-action.js @@ -21915,7 +21915,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core7 = __importStar(require_core()); + var core8 = __importStar(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -21938,10 +21938,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core7.info(err.message); + core8.info(err.message); } const seconds = this.getSleepAmount(); - core7.info(`Waiting ${seconds} seconds before trying again`); + core8.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -22021,7 +22021,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core7 = __importStar(require_core()); + var core8 = __importStar(require_core()); var io3 = __importStar(require_io()); var crypto = __importStar(require("crypto")); var fs = __importStar(require("fs")); @@ -22050,8 +22050,8 @@ var require_tool_cache = __commonJS({ return __awaiter(this, void 0, void 0, function* () { dest = dest || path2.join(_getTempDirectory(), crypto.randomUUID()); yield io3.mkdirP(path2.dirname(dest)); - 
core7.debug(`Downloading ${url}`); - core7.debug(`Destination ${dest}`); + core8.debug(`Downloading ${url}`); + core8.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -22078,7 +22078,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core7.debug("set auth"); + core8.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -22087,7 +22087,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError(response.message.statusCode); - core7.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core8.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream.pipeline); @@ -22096,16 +22096,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs.createWriteStream(dest)); - core7.debug("download complete"); + core8.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core7.debug("download failed"); + core8.debug("download failed"); try { yield io3.rmRF(dest); } catch (err) { - core7.debug(`Failed to delete '${dest}'. ${err.message}`); + core8.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -22120,7 +22120,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core7.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core8.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -22170,7 +22170,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core7.debug("Checking tar --version"); + core8.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -22180,7 +22180,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core7.debug(versionOutput.trim()); + core8.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -22188,7 +22188,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core7.isDebug() && !flags.includes("v")) { + if (core8.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -22220,7 +22220,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core7.isDebug()) { + if (core8.isDebug()) { args.push("-v"); } const xarPath = yield io3.which("xar", true); @@ -22265,7 +22265,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core7.debug(`Using pwsh at path: ${pwshPath}`); + core8.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -22285,7 +22285,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io3.which("powershell", true); - core7.debug(`Using powershell at path: ${powershellPath}`); + core8.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -22294,7 +22294,7 @@ var require_tool_cache = 
__commonJS({ return __awaiter(this, void 0, void 0, function* () { const unzipPath = yield io3.which("unzip", true); const args = [file]; - if (!core7.isDebug()) { + if (!core8.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -22305,8 +22305,8 @@ var require_tool_cache = __commonJS({ return __awaiter(this, void 0, void 0, function* () { version = semver2.clean(version) || version; arch = arch || os.arch(); - core7.debug(`Caching tool ${tool} ${version} ${arch}`); - core7.debug(`source dir: ${sourceDir}`); + core8.debug(`Caching tool ${tool} ${version} ${arch}`); + core8.debug(`source dir: ${sourceDir}`); if (!fs.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -22324,14 +22324,14 @@ var require_tool_cache = __commonJS({ return __awaiter(this, void 0, void 0, function* () { version = semver2.clean(version) || version; arch = arch || os.arch(); - core7.debug(`Caching tool ${tool} ${version} ${arch}`); - core7.debug(`source file: ${sourceFile}`); + core8.debug(`Caching tool ${tool} ${version} ${arch}`); + core8.debug(`source file: ${sourceFile}`); if (!fs.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch); const destPath = path2.join(destFolder, targetFile); - core7.debug(`destination file ${destPath}`); + core8.debug(`destination file ${destPath}`); yield io3.cp(sourceFile, destPath); _completeToolPath(tool, version, arch); return destFolder; @@ -22355,12 +22355,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver2.clean(versionSpec) || ""; const cachePath = path2.join(_getCacheDirectory(), toolName, versionSpec, arch); - core7.debug(`checking cache: ${cachePath}`); + core8.debug(`checking cache: ${cachePath}`); if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) { - core7.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); + core8.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); toolPath = cachePath; } else { - core7.debug("not found"); + core8.debug("not found"); } } return toolPath; @@ -22391,7 +22391,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core7.debug("set auth"); + core8.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -22412,7 +22412,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core7.debug("Invalid json"); + core8.debug("Invalid json"); } } return releases; @@ -22438,7 +22438,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch) { return __awaiter(this, void 0, void 0, function* () { const folderPath = path2.join(_getCacheDirectory(), tool, semver2.clean(version) || version, arch || ""); - core7.debug(`destination ${folderPath}`); + core8.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io3.rmRF(folderPath); yield io3.rmRF(markerPath); @@ -22450,19 +22450,19 @@ var require_tool_cache = __commonJS({ const folderPath = path2.join(_getCacheDirectory(), tool, semver2.clean(version) || version, arch || ""); const markerPath = `${folderPath}.complete`; fs.writeFileSync(markerPath, ""); - core7.debug("finished caching tool"); + core8.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver2.clean(versionSpec) || ""; - core7.debug(`isExplicit: ${c}`); + 
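// Context for the surrounding @actions/tool-cache code: a cached tool only counts as
// present when both its directory and its `.complete` marker exist, which is why
// _completeToolPath writes an empty `${folderPath}.complete` file as the final step
// and find() checks for both. A rough sketch of the resulting layout and lookup
// (tool name and version are illustrative, not from this action):
//
//   $RUNNER_TOOL_CACHE/CodeQL/2.17.0/x64            <- extracted tool
//   $RUNNER_TOOL_CACHE/CodeQL/2.17.0/x64.complete   <- marker written last
//
const toolPath = toolcache.find("CodeQL", "2.17.0"); // "" until the marker exists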
core8.debug(`isExplicit: ${c}`); const valid = semver2.valid(c) != null; - core7.debug(`explicit? ${valid}`); + core8.debug(`explicit? ${valid}`); return valid; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core7.debug(`evaluating ${versions.length} versions`); + core8.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver2.gt(a, b)) { return 1; @@ -22478,9 +22478,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core7.debug(`matched: ${version}`); + core8.debug(`matched: ${version}`); } else { - core7.debug("match not found"); + core8.debug("match not found"); } return version; } @@ -54772,127 +54772,6 @@ var require_del = __commonJS({ } }); -// node_modules/tiny-each-async/index.js -var require_tiny_each_async = __commonJS({ - "node_modules/tiny-each-async/index.js"(exports2, module2) { - "use strict"; - module2.exports = function eachAsync(arr, parallelLimit, iteratorFn, cb) { - var pending = 0; - var index = 0; - var lastIndex = arr.length - 1; - var called = false; - var limit; - var callback; - var iterate; - if (typeof parallelLimit === "number") { - limit = parallelLimit; - iterate = iteratorFn; - callback = cb || function noop() { - }; - } else { - iterate = parallelLimit; - callback = iteratorFn || function noop() { - }; - limit = arr.length; - } - if (!arr.length) { - return callback(); - } - var iteratorLength = iterate.length; - var shouldCallNextIterator = function shouldCallNextIterator2() { - return !called && pending < limit && index < lastIndex; - }; - var iteratorCallback = function iteratorCallback2(err) { - if (called) { - return; - } - pending--; - if (err || index === lastIndex && !pending) { - called = true; - callback(err); - } else if (shouldCallNextIterator()) { - processIterator(++index); - } - }; - var processIterator = function processIterator2() { - pending++; - var args = iteratorLength === 2 ? [arr[index], iteratorCallback] : [arr[index], index, iteratorCallback]; - iterate.apply(null, args); - if (shouldCallNextIterator()) { - processIterator2(++index); - } - }; - processIterator(); - }; - } -}); - -// node_modules/get-folder-size/index.js -var require_get_folder_size = __commonJS({ - "node_modules/get-folder-size/index.js"(exports2, module2) { - "use strict"; - var fs = require("fs"); - var path2 = require("path"); - var eachAsync = require_tiny_each_async(); - function readSizeRecursive(seen, item, ignoreRegEx, callback) { - let cb; - let ignoreRegExp; - if (!callback) { - cb = ignoreRegEx; - ignoreRegExp = null; - } else { - cb = callback; - ignoreRegExp = ignoreRegEx; - } - fs.lstat(item, function lstat(e, stats) { - let total = !e ? 
stats.size || 0 : 0; - if (stats) { - if (seen.has(stats.ino)) { - return cb(null, 0); - } - seen.add(stats.ino); - } - if (!e && stats.isDirectory()) { - fs.readdir(item, (err, list) => { - if (err) { - return cb(err); - } - eachAsync( - list, - 5e3, - (dirItem, next) => { - readSizeRecursive( - seen, - path2.join(item, dirItem), - ignoreRegExp, - (error2, size) => { - if (!error2) { - total += size; - } - next(error2); - } - ); - }, - (finalErr) => { - cb(finalErr, total); - } - ); - }); - } else { - if (ignoreRegExp && ignoreRegExp.test(item)) { - total = 0; - } - cb(e, total); - } - }); - } - module2.exports = (...args) => { - args.unshift(/* @__PURE__ */ new Set()); - return readSizeRecursive(...args); - }; - } -}); - // package.json var require_package = __commonJS({ "package.json"(exports2, module2) { @@ -54940,7 +54819,7 @@ var require_package = __commonJS({ "fast-deep-equal": "^3.1.3", "file-url": "^3.0.0", "follow-redirects": "^1.15.11", - "get-folder-size": "^2.0.1", + "get-folder-size": "^5.0.0", "js-yaml": "^4.1.0", jsonschema: "1.4.1", long: "^5.3.2", @@ -55008,22 +54887,76 @@ var require_package = __commonJS({ // src/start-proxy-action.ts var import_child_process = require("child_process"); var path = __toESM(require("path")); -var core6 = __toESM(require_core()); +var core7 = __toESM(require_core()); var toolcache = __toESM(require_tool_cache()); var import_node_forge = __toESM(require_lib2()); // src/actions-util.ts -var core3 = __toESM(require_core()); +var core4 = __toESM(require_core()); var toolrunner = __toESM(require_toolrunner()); var github = __toESM(require_github()); var io2 = __toESM(require_io()); // src/util.ts -var core2 = __toESM(require_core()); +var core3 = __toESM(require_core()); var exec = __toESM(require_exec()); var io = __toESM(require_io()); var import_del = __toESM(require_del()); -var import_get_folder_size = __toESM(require_get_folder_size()); + +// node_modules/get-folder-size/index.js +var import_node_path = require("node:path"); +async function getFolderSize(itemPath, options) { + return await core(itemPath, options, { errors: true }); +} +getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); +getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); +async function core(rootItemPath, options = {}, returnType = {}) { + const fs = options.fs || await import("node:fs/promises"); + let folderSize = 0n; + const foundInos = /* @__PURE__ */ new Set(); + const errors = []; + await processItem(rootItemPath); + async function processItem(itemPath) { + if (options.ignore?.test(itemPath)) return; + const stats = returnType.strict ? await fs.lstat(itemPath, { bigint: true }) : await fs.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); + if (typeof stats !== "object") return; + if (!foundInos.has(stats.ino)) { + foundInos.add(stats.ino); + folderSize += stats.size; + } + if (stats.isDirectory()) { + const directoryItems = returnType.strict ? await fs.readdir(itemPath) : await fs.readdir(itemPath).catch((error2) => errors.push(error2)); + if (typeof directoryItems !== "object") return; + await Promise.all( + directoryItems.map( + (directoryItem) => processItem((0, import_node_path.join)(itemPath, directoryItem)) + ) + ); + } + } + if (!options.bigint) { + if (folderSize > BigInt(Number.MAX_SAFE_INTEGER)) { + const error2 = new RangeError( + "The folder size is too large to return as a Number. You can instruct this package to return a BigInt instead." 
+ ); + if (returnType.strict) { + throw error2; + } + errors.push(error2); + folderSize = Number.MAX_SAFE_INTEGER; + } else { + folderSize = Number(folderSize); + } + } + if (returnType.errors) { + return { + size: folderSize, + errors: errors.length > 0 ? errors : null + }; + } else { + return folderSize; + } +} // node_modules/js-yaml/dist/js-yaml.mjs function isNothing(subject) { @@ -55594,7 +55527,7 @@ var json = failsafe.extend({ float ] }); -var core = json; +var core2 = json; var YAML_DATE_REGEXP = new RegExp( "^([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])$" ); @@ -55808,7 +55741,7 @@ var set = new type("tag:yaml.org,2002:set", { resolve: resolveYamlSet, construct: constructYamlSet }); -var _default = core.extend({ +var _default = core2.extend({ implicit: [ timestamp, merge @@ -57639,7 +57572,7 @@ function getErrorMessage(error2) { // src/actions-util.ts var pkg = require_package(); var getOptionalInput = function(name) { - const value = core3.getInput(name); + const value = core4.getInput(name); return value.length > 0 ? value : void 0; }; function getTemporaryDirectory() { @@ -57651,17 +57584,17 @@ var persistInputs = function() { const inputEnvironmentVariables = Object.entries(process.env).filter( ([name]) => name.startsWith("INPUT_") ); - core3.saveState(persistedInputsKey, JSON.stringify(inputEnvironmentVariables)); + core4.saveState(persistedInputsKey, JSON.stringify(inputEnvironmentVariables)); }; // src/logging.ts -var core4 = __toESM(require_core()); +var core5 = __toESM(require_core()); function getActionsLogger() { - return core4; + return core5; } // src/start-proxy.ts -var core5 = __toESM(require_core()); +var core6 = __toESM(require_core()); // src/languages.ts var KnownLanguage = /* @__PURE__ */ ((KnownLanguage2) => { @@ -57742,10 +57675,10 @@ function getCredentials(logger, registrySecrets, registriesCredentials, language throw new ConfigurationError("Invalid credentials - must be an object"); } if (isDefined(e.password)) { - core5.setSecret(e.password); + core6.setSecret(e.password); } if (isDefined(e.token)) { - core5.setSecret(e.token); + core6.setSecret(e.token); } if (!isDefined(e.url) && !isDefined(e.host)) { throw new ConfigurationError( @@ -57830,7 +57763,7 @@ async function runWrapper() { const logger = getActionsLogger(); const tempDir = getTemporaryDirectory(); const proxyLogFilePath = path.resolve(tempDir, "proxy.log"); - core6.saveState("proxy-log-file", proxyLogFilePath); + core7.saveState("proxy-log-file", proxyLogFilePath); const credentials = getCredentials( logger, getOptionalInput("registry_secrets"), @@ -57871,7 +57804,7 @@ async function startProxy(binPath, config, logFilePath, logger) { ); subprocess.unref(); if (subprocess.pid) { - core6.saveState("proxy-process-pid", `${subprocess.pid}`); + core7.saveState("proxy-process-pid", `${subprocess.pid}`); } subprocess.on("error", (error2) => { subprocessError = error2; @@ -57890,16 +57823,16 @@ async function startProxy(binPath, config, logFilePath, logger) { throw subprocessError; } logger.info(`Proxy started on ${host}:${port}`); - core6.setOutput("proxy_host", host); - core6.setOutput("proxy_port", port.toString()); - core6.setOutput("proxy_ca_certificate", config.ca.cert); + core7.setOutput("proxy_host", host); + core7.setOutput("proxy_port", port.toString()); + core7.setOutput("proxy_ca_certificate", config.ca.cert); const registry_urls = config.all_credentials.filter((credential) => credential.url !== void 0).map((credential) => ({ type: credential.type, url: credential.url })); - 
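// The inlined module above is get-folder-size v5, which replaces the callback API of
// the v2 copy removed in the earlier hunk. A minimal usage sketch against the
// `getFolderSize` binding it defines (`dir` is an illustrative path): the bare call
// returns { size, errors }, `loose` swallows per-entry fs errors much like the old
// recursive callback walker did, and `strict` rejects on the first fs error.
//
const dir = process.env.RUNNER_TEMP ?? "/tmp";                    // illustrative target
const { size, errors } = await getFolderSize(dir);                // errors is Error[] or null
const sizeInBytes = await getFolderSize.loose(dir);               // Number, clamped at MAX_SAFE_INTEGER
const exact = await getFolderSize.strict(dir, { bigint: true });  // BigInt; no clamp, rejects on fs error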
core6.setOutput("proxy_urls", JSON.stringify(registry_urls)); + core7.setOutput("proxy_urls", JSON.stringify(registry_urls)); } catch (error2) { - core6.setFailed(`start-proxy action failed: ${getErrorMessage(error2)}`); + core7.setFailed(`start-proxy action failed: ${getErrorMessage(error2)}`); } } async function getProxyBinaryPath() { diff --git a/lib/upload-lib.js b/lib/upload-lib.js index 928711b52..5d4c3aeb0 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -35659,127 +35659,6 @@ var require_del = __commonJS({ } }); -// node_modules/tiny-each-async/index.js -var require_tiny_each_async = __commonJS({ - "node_modules/tiny-each-async/index.js"(exports2, module2) { - "use strict"; - module2.exports = function eachAsync(arr, parallelLimit, iteratorFn, cb) { - var pending = 0; - var index = 0; - var lastIndex = arr.length - 1; - var called = false; - var limit; - var callback; - var iterate; - if (typeof parallelLimit === "number") { - limit = parallelLimit; - iterate = iteratorFn; - callback = cb || function noop() { - }; - } else { - iterate = parallelLimit; - callback = iteratorFn || function noop() { - }; - limit = arr.length; - } - if (!arr.length) { - return callback(); - } - var iteratorLength = iterate.length; - var shouldCallNextIterator = function shouldCallNextIterator2() { - return !called && pending < limit && index < lastIndex; - }; - var iteratorCallback = function iteratorCallback2(err) { - if (called) { - return; - } - pending--; - if (err || index === lastIndex && !pending) { - called = true; - callback(err); - } else if (shouldCallNextIterator()) { - processIterator(++index); - } - }; - var processIterator = function processIterator2() { - pending++; - var args = iteratorLength === 2 ? [arr[index], iteratorCallback] : [arr[index], index, iteratorCallback]; - iterate.apply(null, args); - if (shouldCallNextIterator()) { - processIterator2(++index); - } - }; - processIterator(); - }; - } -}); - -// node_modules/get-folder-size/index.js -var require_get_folder_size = __commonJS({ - "node_modules/get-folder-size/index.js"(exports2, module2) { - "use strict"; - var fs11 = require("fs"); - var path11 = require("path"); - var eachAsync = require_tiny_each_async(); - function readSizeRecursive(seen, item, ignoreRegEx, callback) { - let cb; - let ignoreRegExp; - if (!callback) { - cb = ignoreRegEx; - ignoreRegExp = null; - } else { - cb = callback; - ignoreRegExp = ignoreRegEx; - } - fs11.lstat(item, function lstat(e, stats) { - let total = !e ? 
stats.size || 0 : 0; - if (stats) { - if (seen.has(stats.ino)) { - return cb(null, 0); - } - seen.add(stats.ino); - } - if (!e && stats.isDirectory()) { - fs11.readdir(item, (err, list) => { - if (err) { - return cb(err); - } - eachAsync( - list, - 5e3, - (dirItem, next) => { - readSizeRecursive( - seen, - path11.join(item, dirItem), - ignoreRegExp, - (error2, size) => { - if (!error2) { - total += size; - } - next(error2); - } - ); - }, - (finalErr) => { - cb(finalErr, total); - } - ); - }); - } else { - if (ignoreRegExp && ignoreRegExp.test(item)) { - total = 0; - } - cb(e, total); - } - }); - } - module2.exports = (...args) => { - args.unshift(/* @__PURE__ */ new Set()); - return readSizeRecursive(...args); - }; - } -}); - // node_modules/semver/internal/constants.js var require_constants9 = __commonJS({ "node_modules/semver/internal/constants.js"(exports2, module2) { @@ -37735,7 +37614,7 @@ var require_package = __commonJS({ "fast-deep-equal": "^3.1.3", "file-url": "^3.0.0", "follow-redirects": "^1.15.11", - "get-folder-size": "^2.0.1", + "get-folder-size": "^5.0.0", "js-yaml": "^4.1.0", jsonschema: "1.4.1", long: "^5.3.2", @@ -39399,7 +39278,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core11 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -39409,15 +39288,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core11.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core12.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core11.debug(`implicitDescendants '${result.implicitDescendants}'`); + core12.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core11.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core12.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -40081,7 +39960,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core11 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var fs11 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path11 = __importStar4(require("path")); @@ -40132,7 +40011,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core11.debug(`Search path '${searchPath}'`); + core12.debug(`Search path '${searchPath}'`); try { yield __await4(fs11.promises.lstat(searchPath)); } catch (err) { @@ -40204,7 +40083,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core11.debug(`Broken symlink '${item.path}'`); + core12.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -40220,7 +40099,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core11.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core12.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -41544,7 +41423,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core11 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob = __importStar4(require_glob2()); var io6 = __importStar4(require_io()); @@ -41597,7 +41476,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path11.relative(workspace, file).replace(new RegExp(`\\${path11.sep}`, "g"), "/"); - core11.debug(`Matched: ${relativeFile}`); + core12.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -41627,7 +41506,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core11.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core12.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -41638,10 +41517,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core11.debug(err.message); + core12.debug(err.message); } versionOutput = versionOutput.trim(); - core11.debug(versionOutput); + core12.debug(versionOutput); return versionOutput; }); } @@ -41649,7 +41528,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core11.debug(`zstd version: ${version}`); + core12.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -76961,7 +76840,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core11 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -77003,7 +76882,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core11.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core12.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -77058,14 +76937,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core11.debug(`BlobClient: 
${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core12.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core11.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core12.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -77136,7 +77015,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core11 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -77197,9 +77076,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core11.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core12.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core11.debug(`${name} - Error is not retryable`); + core12.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -77304,7 +77183,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core11 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -77342,7 +77221,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core11.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core12.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -77376,7 +77255,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core11.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core12.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -77426,7 +77305,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core11.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core12.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -77437,7 +77316,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core11.debug("Unable to validate download, no Content-Length header"); + core12.debug("Unable to validate download, no Content-Length header"); } }); } @@ -77557,7 +77436,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core11.debug("Unable to determine content length, downloading file with http-client..."); + core12.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -77637,7 +77516,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core11 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -77657,9 +77536,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core11.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core11.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core11.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core12.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core12.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core12.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -77696,12 +77575,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core11.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core11.debug(`Download concurrency: ${result.downloadConcurrency}`); - core11.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core11.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core11.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core11.debug(`Lookup only: ${result.lookupOnly}`); + core12.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core12.debug(`Download concurrency: ${result.downloadConcurrency}`); + core12.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core12.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core12.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core12.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -77881,7 +77760,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core11 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs11 = __importStar4(require("fs")); @@ -77899,7 +77778,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core11.debug(`Resource Url: ${url}`); + core12.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -77927,7 +77806,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core11.isDebug()) { + if (core12.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -77940,9 +77819,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core11.setSecret(cacheDownloadUrl); - core11.debug(`Cache Result:`); - core11.debug(JSON.stringify(cacheResult)); + core12.setSecret(cacheDownloadUrl); + core12.debug(`Cache Result:`); + core12.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -77957,10 +77836,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
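// Worked example of the env overrides getUploadOptions reads above (values here are
// illustrative): concurrency is capped at 32 and the chunk size, given in MB, is
// converted to bytes and capped at 128 MB.
//
process.env.CACHE_UPLOAD_CONCURRENCY = "64"; // clamped: Math.min(32, 64) -> 32
process.env.CACHE_UPLOAD_CHUNK_SIZE = "64";  // 64 MB -> 64 * 1024 * 1024 = 67108864 bytes, cap 128 MB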
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core11.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core12.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core11.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core12.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -78005,7 +77884,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core11.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core12.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -78027,7 +77906,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core11.debug("Awaiting all uploads"); + core12.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -78070,16 +77949,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core11.debug("Upload cache"); + core12.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core11.debug("Commiting cache"); + core12.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core11.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core12.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core11.info("Cache saved successfully"); + core12.info("Cache saved successfully"); } }); } @@ -83505,7 +83384,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core11 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var path11 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -83558,7 +83437,7 @@ var require_cache3 = __commonJS({ function restoreCache3(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core11.debug(`Cache service version: ${cacheServiceVersion}`); + core12.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -83574,8 +83453,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core11.debug("Resolved Keys:"); - core11.debug(JSON.stringify(keys)); + core12.debug("Resolved Keys:"); + core12.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -83593,19 +83472,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core11.info("Lookup only - skipping download"); + core12.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path11.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core11.debug(`Archive Path: ${archivePath}`); + core12.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core11.isDebug()) { + if (core12.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core11.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core12.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core11.info("Cache restored successfully"); + core12.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -83613,16 +83492,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core11.error(`Failed to restore: ${error2.message}`); + core12.error(`Failed to restore: ${error2.message}`); } else { - core11.warning(`Failed to restore: ${error2.message}`); + core12.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core11.debug(`Failed to delete archive: ${error2}`); + core12.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -83633,8 +83512,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core11.debug("Resolved Keys:"); - core11.debug(JSON.stringify(keys)); + core12.debug("Resolved Keys:"); + core12.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -83652,30 +83531,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core11.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core12.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core11.info(`Cache hit for restore-key: ${response.matchedKey}`); + core12.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core11.info(`Cache hit for: ${response.matchedKey}`); + core12.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
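// Call-site shape for the restoreCache/saveCache pair these renamed hunks pass through
// (the public @actions/cache API; paths and keys below are illustrative, not taken from
// this action). Note the validation visible above: the primary key plus restore keys
// may total at most 10, and lookupOnly short-circuits before any download.
//
const hitKey = await cache.restoreCache(["~/.codeql"], key, [keyPrefix]); // matched key, or undefined
const cacheId = await cache.saveCache(["~/.codeql"], key);                // stays -1 if reserving fails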
void 0 : options.lookupOnly) { - core11.info("Lookup only - skipping download"); + core12.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path11.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core11.debug(`Archive path: ${archivePath}`); - core11.debug(`Starting download of archive to: ${archivePath}`); + core12.debug(`Archive path: ${archivePath}`); + core12.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core11.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core11.isDebug()) { + core12.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core12.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core11.info("Cache restored successfully"); + core12.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -83683,9 +83562,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core11.error(`Failed to restore: ${error2.message}`); + core12.error(`Failed to restore: ${error2.message}`); } else { - core11.warning(`Failed to restore: ${error2.message}`); + core12.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -83694,7 +83573,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core11.debug(`Failed to delete archive: ${error2}`); + core12.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -83703,7 +83582,7 @@ var require_cache3 = __commonJS({ function saveCache3(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core11.debug(`Cache service version: ${cacheServiceVersion}`); + core12.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -83722,26 +83601,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core11.debug("Cache Paths:"); - core11.debug(`${JSON.stringify(cachePaths)}`); + core12.debug("Cache Paths:"); + core12.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path11.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core11.debug(`Archive Path: ${archivePath}`); + core12.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core11.isDebug()) { + if (core12.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core11.debug(`File Size: ${archiveFileSize}`); + core12.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core11.debug("Reserving Cache"); + core12.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -83754,26 +83633,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core11.debug(`Saving Cache (ID: ${cacheId})`); + core12.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core11.info(`Failed to save: ${typedError.message}`); + core12.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core11.error(`Failed to save: ${typedError.message}`); + core12.error(`Failed to save: ${typedError.message}`); } else { - core11.warning(`Failed to save: ${typedError.message}`); + core12.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core11.debug(`Failed to delete archive: ${error2}`); + core12.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -83786,26 +83665,26 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core11.debug("Cache Paths:"); - core11.debug(`${JSON.stringify(cachePaths)}`); + core12.debug("Cache Paths:"); + core12.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path11.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core11.debug(`Archive Path: ${archivePath}`); + core12.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core11.isDebug()) { + if (core12.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core11.debug(`File Size: ${archiveFileSize}`); + core12.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } options.archiveSizeBytes = archiveFileSize; - core11.debug("Reserving Cache"); + core12.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -83819,10 +83698,10 @@ var require_cache3 = __commonJS({ } signedUploadUrl = response.signedUploadUrl; } catch (error2) { - core11.debug(`Failed to reserve cache: 
${error2}`); + core12.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core11.debug(`Attempting to upload cache located at: ${archivePath}`); + core12.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -83830,7 +83709,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core11.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core12.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); } @@ -83840,19 +83719,19 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core11.info(`Failed to save: ${typedError.message}`); + core12.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core11.error(`Failed to save: ${typedError.message}`); + core12.error(`Failed to save: ${typedError.message}`); } else { - core11.warning(`Failed to save: ${typedError.message}`); + core12.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core11.debug(`Failed to delete archive: ${error2}`); + core12.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -84060,7 +83939,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core11 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -84083,10 +83962,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core11.info(err.message); + core12.info(err.message); } const seconds = this.getSleepAmount(); - core11.info(`Waiting ${seconds} seconds before trying again`); + core12.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -84166,7 +84045,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core11 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var io6 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); var fs11 = __importStar4(require("fs")); @@ -84195,8 +84074,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || path11.join(_getTempDirectory(), crypto.randomUUID()); yield io6.mkdirP(path11.dirname(dest)); - core11.debug(`Downloading ${url}`); - core11.debug(`Destination ${dest}`); + core12.debug(`Downloading 
${url}`); + core12.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -84223,7 +84102,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core11.debug("set auth"); + core12.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -84232,7 +84111,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError(response.message.statusCode); - core11.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core12.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -84241,16 +84120,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs11.createWriteStream(dest)); - core11.debug("download complete"); + core12.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core11.debug("download failed"); + core12.debug("download failed"); try { yield io6.rmRF(dest); } catch (err) { - core11.debug(`Failed to delete '${dest}'. ${err.message}`); + core12.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -84265,7 +84144,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core11.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core12.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -84315,7 +84194,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core11.debug("Checking tar --version"); + core12.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -84325,7 +84204,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core11.debug(versionOutput.trim()); + core12.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -84333,7 +84212,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core11.isDebug() && !flags.includes("v")) { + if (core12.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -84365,7 +84244,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core11.isDebug()) { + if (core12.isDebug()) { args.push("-v"); } const xarPath = yield io6.which("xar", true); @@ -84410,7 +84289,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core11.debug(`Using pwsh at path: ${pwshPath}`); + core12.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -84430,7 +84309,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io6.which("powershell", true); - core11.debug(`Using powershell at path: ${powershellPath}`); + core12.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -84439,7 +84318,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const 
unzipPath = yield io6.which("unzip", true); const args = [file]; - if (!core11.isDebug()) { + if (!core12.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -84450,8 +84329,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os2.arch(); - core11.debug(`Caching tool ${tool} ${version} ${arch2}`); - core11.debug(`source dir: ${sourceDir}`); + core12.debug(`Caching tool ${tool} ${version} ${arch2}`); + core12.debug(`source dir: ${sourceDir}`); if (!fs11.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -84469,14 +84348,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os2.arch(); - core11.debug(`Caching tool ${tool} ${version} ${arch2}`); - core11.debug(`source file: ${sourceFile}`); + core12.debug(`Caching tool ${tool} ${version} ${arch2}`); + core12.debug(`source file: ${sourceFile}`); if (!fs11.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path11.join(destFolder, targetFile); - core11.debug(`destination file ${destPath}`); + core12.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -84500,12 +84379,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path11.join(_getCacheDirectory(), toolName, versionSpec, arch2); - core11.debug(`checking cache: ${cachePath}`); + core12.debug(`checking cache: ${cachePath}`); if (fs11.existsSync(cachePath) && fs11.existsSync(`${cachePath}.complete`)) { - core11.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core12.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core11.debug("not found"); + core12.debug("not found"); } } return toolPath; @@ -84536,7 +84415,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core11.debug("set auth"); + core12.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -84557,7 +84436,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core11.debug("Invalid json"); + core12.debug("Invalid json"); } } return releases; @@ -84583,7 +84462,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path11.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); - core11.debug(`destination ${folderPath}`); + core12.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); yield io6.rmRF(markerPath); @@ -84595,19 +84474,19 @@ var require_tool_cache = __commonJS({ const folderPath = path11.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs11.writeFileSync(markerPath, ""); - core11.debug("finished caching tool"); + core12.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - core11.debug(`isExplicit: ${c}`); + core12.debug(`isExplicit: 
${c}`); const valid3 = semver8.valid(c) != null; - core11.debug(`explicit? ${valid3}`); + core12.debug(`explicit? ${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core11.debug(`evaluating ${versions.length} versions`); + core12.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver8.gt(a, b)) { return 1; @@ -84623,9 +84502,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core11.debug(`matched: ${version}`); + core12.debug(`matched: ${version}`); } else { - core11.debug("match not found"); + core12.debug("match not found"); } return version; } @@ -88123,25 +88002,79 @@ module.exports = __toCommonJS(upload_lib_exports); var fs10 = __toESM(require("fs")); var path10 = __toESM(require("path")); var import_zlib = __toESM(require("zlib")); -var core10 = __toESM(require_core()); +var core11 = __toESM(require_core()); var import_file_url = __toESM(require_file_url()); var jsonschema = __toESM(require_lib2()); // src/actions-util.ts var fs = __toESM(require("fs")); var path2 = __toESM(require("path")); -var core3 = __toESM(require_core()); +var core4 = __toESM(require_core()); var toolrunner = __toESM(require_toolrunner()); var github = __toESM(require_github()); var io2 = __toESM(require_io()); // src/util.ts var path = __toESM(require("path")); -var core2 = __toESM(require_core()); +var core3 = __toESM(require_core()); var exec = __toESM(require_exec()); var io = __toESM(require_io()); var import_del = __toESM(require_del()); -var import_get_folder_size = __toESM(require_get_folder_size()); + +// node_modules/get-folder-size/index.js +var import_node_path = require("node:path"); +async function getFolderSize(itemPath, options) { + return await core(itemPath, options, { errors: true }); +} +getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); +getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); +async function core(rootItemPath, options = {}, returnType = {}) { + const fs11 = options.fs || await import("node:fs/promises"); + let folderSize = 0n; + const foundInos = /* @__PURE__ */ new Set(); + const errors = []; + await processItem(rootItemPath); + async function processItem(itemPath) { + if (options.ignore?.test(itemPath)) return; + const stats = returnType.strict ? await fs11.lstat(itemPath, { bigint: true }) : await fs11.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); + if (typeof stats !== "object") return; + if (!foundInos.has(stats.ino)) { + foundInos.add(stats.ino); + folderSize += stats.size; + } + if (stats.isDirectory()) { + const directoryItems = returnType.strict ? await fs11.readdir(itemPath) : await fs11.readdir(itemPath).catch((error2) => errors.push(error2)); + if (typeof directoryItems !== "object") return; + await Promise.all( + directoryItems.map( + (directoryItem) => processItem((0, import_node_path.join)(itemPath, directoryItem)) + ) + ); + } + } + if (!options.bigint) { + if (folderSize > BigInt(Number.MAX_SAFE_INTEGER)) { + const error2 = new RangeError( + "The folder size is too large to return as a Number. You can instruct this package to return a BigInt instead." + ); + if (returnType.strict) { + throw error2; + } + errors.push(error2); + folderSize = Number.MAX_SAFE_INTEGER; + } else { + folderSize = Number(folderSize); + } + } + if (returnType.errors) { + return { + size: folderSize, + errors: errors.length > 0 ? 
errors : null + }; + } else { + return folderSize; + } +} // node_modules/js-yaml/dist/js-yaml.mjs function isNothing(subject) { @@ -88712,7 +88645,7 @@ var json = failsafe.extend({ float ] }); -var core = json; +var core2 = json; var YAML_DATE_REGEXP = new RegExp( "^([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])$" ); @@ -88926,7 +88859,7 @@ var set = new type("tag:yaml.org,2002:set", { resolve: resolveYamlSet, construct: constructYamlSet }); -var _default = core.extend({ +var _default = core2.extend({ implicit: [ timestamp, merge @@ -90905,14 +90838,14 @@ async function asyncSome(array, predicate) { // src/actions-util.ts var pkg = require_package(); var getRequiredInput = function(name) { - const value = core3.getInput(name); + const value = core4.getInput(name); if (!value) { throw new ConfigurationError(`Input required and not supplied: ${name}`); } return value; }; var getOptionalInput = function(name) { - const value = core3.getInput(name); + const value = core4.getInput(name); return value.length > 0 ? value : void 0; }; function getTemporaryDirectory() { @@ -91032,7 +90965,7 @@ async function runTool(cmd, args = [], opts = {}) { } // src/api-client.ts -var core4 = __toESM(require_core()); +var core5 = __toESM(require_core()); var githubUtils = __toESM(require_utils4()); var retry = __toESM(require_dist_node15()); var import_console_log_level = __toESM(require_console_log_level()); @@ -91137,7 +91070,7 @@ async function getAnalysisKey() { const workflowPath = await getWorkflowRelativePath(); const jobName = getRequiredEnvParam("GITHUB_JOB"); analysisKey = `${workflowPath}:${jobName}`; - core4.exportVariable(analysisKeyEnvVar, analysisKey); + core5.exportVariable(analysisKeyEnvVar, analysisKey); return analysisKey; } function computeAutomationID(analysis_key, environment) { @@ -91170,7 +91103,7 @@ function wrapApiConfigurationError(e) { // src/codeql.ts var fs8 = __toESM(require("fs")); var path8 = __toESM(require("path")); -var core9 = __toESM(require_core()); +var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); // src/cli-errors.ts @@ -91424,7 +91357,7 @@ var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { var supportedAnalysisKinds = new Set(Object.values(AnalysisKind)); // src/caching-utils.ts -var core5 = __toESM(require_core()); +var core6 = __toESM(require_core()); // src/diff-informed-analysis-utils.ts var fs3 = __toESM(require("fs")); @@ -91443,13 +91376,13 @@ var path3 = __toESM(require("path")); var actionsCache = __toESM(require_cache3()); // src/git-utils.ts -var core6 = __toESM(require_core()); +var core7 = __toESM(require_core()); var toolrunner2 = __toESM(require_toolrunner()); var io3 = __toESM(require_io()); var runGitCommand = async function(workingDirectory, args, customErrorMessage) { let stdout = ""; let stderr = ""; - core6.debug(`Running git command: git ${args.join(" ")}`); + core7.debug(`Running git command: git ${args.join(" ")}`); try { await new toolrunner2.ToolRunner(await io3.which("git", true), args, { silent: true, @@ -91469,7 +91402,7 @@ var runGitCommand = async function(workingDirectory, args, customErrorMessage) { if (stderr.includes("not a git repository")) { reason = "The checkout path provided to the action does not appear to be a git repository."; } - core6.info(`git call failed. ${customErrorMessage} Error: ${reason}`); + core7.info(`git call failed. 
${customErrorMessage} Error: ${reason}`); throw error2; } }; @@ -91614,7 +91547,7 @@ async function getRef() { ) !== head; if (hasChangedRef) { const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head"); - core6.debug( + core7.debug( `No longer on merge commit, rewriting ref from ${ref} to ${newRef}.` ); return newRef; @@ -91640,7 +91573,7 @@ async function isAnalyzingDefaultBranch() { } // src/logging.ts -var core7 = __toESM(require_core()); +var core8 = __toESM(require_core()); function formatDuration(durationMs) { if (durationMs < 1e3) { return `${durationMs}ms`; @@ -92224,7 +92157,7 @@ var fs6 = __toESM(require("fs")); var os = __toESM(require("os")); var path6 = __toESM(require("path")); var import_perf_hooks = require("perf_hooks"); -var core8 = __toESM(require_core()); +var core9 = __toESM(require_core()); var import_http_client = __toESM(require_lib()); var toolcache2 = __toESM(require_tool_cache()); var import_follow_redirects = __toESM(require_follow_redirects()); @@ -92278,10 +92211,10 @@ async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorizat }; } } catch (e) { - core8.warning( + core9.warning( `Failed to download and extract CodeQL bundle using streaming with error: ${getErrorMessage(e)}` ); - core8.warning(`Falling back to downloading the bundle before extracting.`); + core9.warning(`Falling back to downloading the bundle before extracting.`); await cleanUpGlob(dest, "CodeQL bundle", logger); } const toolsDownloadStart = import_perf_hooks.performance.now(); @@ -93329,12 +93262,12 @@ ${output}` ); } else if (checkVersion && process.env["CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */] !== "true" && !await codeQlVersionAtLeast(codeql, CODEQL_NEXT_MINIMUM_VERSION)) { const result = await codeql.getVersion(); - core9.warning( + core10.warning( `CodeQL CLI version ${result.version} was discontinued on ${GHES_MOST_RECENT_DEPRECATION_DATE} alongside GitHub Enterprise Server ${GHES_VERSION_MOST_RECENTLY_DEPRECATED} and will not be supported by the next minor release of the CodeQL Action. Please update to CodeQL CLI version ${CODEQL_NEXT_MINIMUM_VERSION} or later. For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version. Alternatively, if you want to continue using CodeQL CLI version ${result.version}, you can replace 'github/codeql-action/*@v${getActionVersion().split(".")[0]}' by 'github/codeql-action/*@v${getActionVersion()}' in your code scanning workflow to continue using this version of the CodeQL Action.` ); - core9.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); + core10.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); } return codeql; } @@ -94692,7 +94625,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. 
${deprecationMoreInformationMessage}` ); - core10.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core11.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -94742,7 +94675,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple CodeQL runs with the same category is deprecated ${deprecationWarningMessage} for CodeQL CLI 2.16.6 and earlier. Please update your CodeQL CLI version or update your workflow to set a distinct category for each CodeQL run. ${deprecationMoreInformationMessage}` ); - core10.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core11.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -94812,13 +94745,13 @@ async function uploadPayload(payload, repositoryNwo, logger, target) { if (isHTTPError(e)) { switch (e.status) { case 403: - core10.warning(e.message || GENERIC_403_MSG); + core11.warning(e.message || GENERIC_403_MSG); break; case 404: - core10.warning(e.message || GENERIC_404_MSG); + core11.warning(e.message || GENERIC_404_MSG); break; default: - core10.warning(e.message); + core11.warning(e.message); break; } } @@ -95164,7 +95097,7 @@ function validateUniqueCategory(sarif, sentinelPrefix = CodeScanningTarget.senti `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})` ); } - core10.exportVariable(sentinelEnvVar, sentinelEnvVar); + core11.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { diff --git a/lib/upload-sarif-action-post.js b/lib/upload-sarif-action-post.js index e17f9b23b..a61d42310 100644 --- a/lib/upload-sarif-action-post.js +++ b/lib/upload-sarif-action-post.js @@ -34336,127 +34336,6 @@ var require_del = __commonJS({ } }); -// node_modules/tiny-each-async/index.js -var require_tiny_each_async = __commonJS({ - "node_modules/tiny-each-async/index.js"(exports2, module2) { - "use strict"; - module2.exports = function eachAsync(arr, parallelLimit, iteratorFn, cb) { - var pending = 0; - var index = 0; - var lastIndex = arr.length - 1; - var called = false; - var limit; - var callback; - var iterate; - if (typeof parallelLimit === "number") { - limit = parallelLimit; - iterate = iteratorFn; - callback = cb || function noop() { - }; - } else { - iterate = parallelLimit; - callback = iteratorFn || function noop() { - }; - limit = arr.length; - } - if (!arr.length) { - return callback(); - } - var iteratorLength = iterate.length; - var shouldCallNextIterator = function shouldCallNextIterator2() { - return !called && pending < limit && index < lastIndex; - }; - var iteratorCallback = function iteratorCallback2(err) { - if (called) { - return; - } - pending--; - if (err || index === lastIndex && !pending) { - called = true; - callback(err); - } else if (shouldCallNextIterator()) { - processIterator(++index); - } - }; - var processIterator = function processIterator2() { - pending++; - var args = iteratorLength === 2 ? 
[arr[index], iteratorCallback] : [arr[index], index, iteratorCallback]; - iterate.apply(null, args); - if (shouldCallNextIterator()) { - processIterator2(++index); - } - }; - processIterator(); - }; - } -}); - -// node_modules/get-folder-size/index.js -var require_get_folder_size = __commonJS({ - "node_modules/get-folder-size/index.js"(exports2, module2) { - "use strict"; - var fs2 = require("fs"); - var path2 = require("path"); - var eachAsync = require_tiny_each_async(); - function readSizeRecursive(seen, item, ignoreRegEx, callback) { - let cb; - let ignoreRegExp; - if (!callback) { - cb = ignoreRegEx; - ignoreRegExp = null; - } else { - cb = callback; - ignoreRegExp = ignoreRegEx; - } - fs2.lstat(item, function lstat(e, stats) { - let total = !e ? stats.size || 0 : 0; - if (stats) { - if (seen.has(stats.ino)) { - return cb(null, 0); - } - seen.add(stats.ino); - } - if (!e && stats.isDirectory()) { - fs2.readdir(item, (err, list) => { - if (err) { - return cb(err); - } - eachAsync( - list, - 5e3, - (dirItem, next) => { - readSizeRecursive( - seen, - path2.join(item, dirItem), - ignoreRegExp, - (error2, size) => { - if (!error2) { - total += size; - } - next(error2); - } - ); - }, - (finalErr) => { - cb(finalErr, total); - } - ); - }); - } else { - if (ignoreRegExp && ignoreRegExp.test(item)) { - total = 0; - } - cb(e, total); - } - }); - } - module2.exports = (...args) => { - args.unshift(/* @__PURE__ */ new Set()); - return readSizeRecursive(...args); - }; - } -}); - // node_modules/semver/internal/constants.js var require_constants9 = __commonJS({ "node_modules/semver/internal/constants.js"(exports2, module2) { @@ -36412,7 +36291,7 @@ var require_package = __commonJS({ "fast-deep-equal": "^3.1.3", "file-url": "^3.0.0", "follow-redirects": "^1.15.11", - "get-folder-size": "^2.0.1", + "get-folder-size": "^5.0.0", "js-yaml": "^4.1.0", jsonschema: "1.4.1", long: "^5.3.2", @@ -44371,14 +44250,14 @@ var require_retention = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getExpiration = void 0; var generated_1 = require_generated(); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getExpiration(retentionDays) { if (!retentionDays) { return void 0; } const maxRetentionDays = getRetentionDays(); if (maxRetentionDays && maxRetentionDays < retentionDays) { - core13.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. Using ${maxRetentionDays} instead.`); + core14.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. 
Using ${maxRetentionDays} instead.`); retentionDays = maxRetentionDays; } const expirationDate = /* @__PURE__ */ new Date(); @@ -44970,7 +44849,7 @@ var require_util8 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getBackendIdsFromToken = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var config_1 = require_config(); var jwt_decode_1 = __importDefault4(require_jwt_decode_cjs()); var InvalidJwtError = new Error("Failed to get backend IDs: The provided JWT token is invalid and/or missing claims"); @@ -44996,8 +44875,8 @@ var require_util8 = __commonJS({ workflowRunBackendId: scopeParts[1], workflowJobRunBackendId: scopeParts[2] }; - core13.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`); - core13.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`); + core14.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`); + core14.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`); return ids; } throw InvalidJwtError; @@ -80190,7 +80069,7 @@ var require_blob_upload = __commonJS({ exports2.uploadZipToBlobStorage = void 0; var storage_blob_1 = require_dist7(); var config_1 = require_config(); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var crypto = __importStar4(require("crypto")); var stream = __importStar4(require("stream")); var errors_1 = require_errors2(); @@ -80216,9 +80095,9 @@ var require_blob_upload = __commonJS({ const bufferSize = (0, config_1.getUploadChunkSize)(); const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL); const blockBlobClient = blobClient.getBlockBlobClient(); - core13.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`); + core14.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`); const uploadCallback = (progress) => { - core13.info(`Uploaded bytes ${progress.loadedBytes}`); + core14.info(`Uploaded bytes ${progress.loadedBytes}`); uploadByteCount = progress.loadedBytes; lastProgressTime = Date.now(); }; @@ -80232,7 +80111,7 @@ var require_blob_upload = __commonJS({ const hashStream = crypto.createHash("sha256"); zipUploadStream.pipe(uploadStream); zipUploadStream.pipe(hashStream).setEncoding("hex"); - core13.info("Beginning upload of artifact content to blob storage"); + core14.info("Beginning upload of artifact content to blob storage"); try { yield Promise.race([ blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options), @@ -80246,12 +80125,12 @@ var require_blob_upload = __commonJS({ } finally { abortController.abort(); } - core13.info("Finished uploading artifact content to blob storage!"); + core14.info("Finished uploading artifact content to blob storage!"); hashStream.end(); sha256Hash = hashStream.read(); - core13.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`); + core14.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`); if (uploadByteCount === 0) { - core13.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`); + core14.warning(`No data was uploaded to blob storage. 
Reported upload byte count is 0.`); } return { uploadSize: uploadByteCount, @@ -83307,7 +83186,7 @@ var require_BufferList = __commonJS({ this.head = this.tail = null; this.length = 0; }; - BufferList.prototype.join = function join(s) { + BufferList.prototype.join = function join2(s) { if (this.length === 0) return ""; var p = this.head; var ret = "" + p.data; @@ -104310,7 +104189,7 @@ var require_zip2 = __commonJS({ var stream = __importStar4(require("stream")); var promises_1 = require("fs/promises"); var archiver2 = __importStar4(require_archiver()); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var config_1 = require_config(); exports2.DEFAULT_COMPRESSION_LEVEL = 6; var ZipUploadStream = class extends stream.Transform { @@ -104327,7 +104206,7 @@ var require_zip2 = __commonJS({ exports2.ZipUploadStream = ZipUploadStream; function createZipUploadStream(uploadSpecification, compressionLevel = exports2.DEFAULT_COMPRESSION_LEVEL) { return __awaiter4(this, void 0, void 0, function* () { - core13.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`); + core14.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`); const zip = archiver2.create("zip", { highWaterMark: (0, config_1.getUploadChunkSize)(), zlib: { level: compressionLevel } @@ -104351,8 +104230,8 @@ var require_zip2 = __commonJS({ } const bufferSize = (0, config_1.getUploadChunkSize)(); const zipUploadStream = new ZipUploadStream(bufferSize); - core13.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`); - core13.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`); + core14.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`); + core14.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`); zip.pipe(zipUploadStream); zip.finalize(); return zipUploadStream; @@ -104360,24 +104239,24 @@ var require_zip2 = __commonJS({ } exports2.createZipUploadStream = createZipUploadStream; var zipErrorCallback = (error2) => { - core13.error("An error has occurred while creating the zip file for upload"); - core13.info(error2); + core14.error("An error has occurred while creating the zip file for upload"); + core14.info(error2); throw new Error("An error has occurred during zip creation for the artifact"); }; var zipWarningCallback = (error2) => { if (error2.code === "ENOENT") { - core13.warning("ENOENT warning during artifact zip creation. No such file or directory"); - core13.info(error2); + core14.warning("ENOENT warning during artifact zip creation. 
No such file or directory"); + core14.info(error2); } else { - core13.warning(`A non-blocking warning has occurred during artifact zip creation: ${error2.code}`); - core13.info(error2); + core14.warning(`A non-blocking warning has occurred during artifact zip creation: ${error2.code}`); + core14.info(error2); } }; var zipFinishCallback = () => { - core13.debug("Zip stream for upload has finished."); + core14.debug("Zip stream for upload has finished."); }; var zipEndCallback = () => { - core13.debug("Zip stream for upload has ended."); + core14.debug("Zip stream for upload has ended."); }; } }); @@ -104442,7 +104321,7 @@ var require_upload_artifact = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadArtifact = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var retention_1 = require_retention(); var path_and_artifact_name_validation_1 = require_path_and_artifact_name_validation(); var artifact_twirp_client_1 = require_artifact_twirp_client2(); @@ -104489,13 +104368,13 @@ var require_upload_artifact = __commonJS({ value: `sha256:${uploadResult.sha256Hash}` }); } - core13.info(`Finalizing artifact upload`); + core14.info(`Finalizing artifact upload`); const finalizeArtifactResp = yield artifactClient.FinalizeArtifact(finalizeArtifactReq); if (!finalizeArtifactResp.ok) { throw new errors_1.InvalidResponseError("FinalizeArtifact: response from backend was not ok"); } const artifactId = BigInt(finalizeArtifactResp.artifactId); - core13.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`); + core14.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`); return { size: uploadResult.uploadSize, digest: uploadResult.sha256Hash, @@ -111643,7 +111522,7 @@ var require_download_artifact = __commonJS({ var crypto = __importStar4(require("crypto")); var stream = __importStar4(require("stream")); var github2 = __importStar4(require_github2()); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var httpClient = __importStar4(require_lib()); var unzip_stream_1 = __importDefault4(require_unzip()); var user_agent_1 = require_user_agent(); @@ -111679,7 +111558,7 @@ var require_download_artifact = __commonJS({ return yield streamExtractExternal(url, directory); } catch (error2) { retryCount++; - core13.debug(`Failed to download artifact after ${retryCount} retries due to ${error2.message}. Retrying in 5 seconds...`); + core14.debug(`Failed to download artifact after ${retryCount} retries due to ${error2.message}. 
Retrying in 5 seconds...`); yield new Promise((resolve2) => setTimeout(resolve2, 5e3)); } } @@ -111708,7 +111587,7 @@ var require_download_artifact = __commonJS({ extractStream.on("data", () => { timer.refresh(); }).on("error", (error2) => { - core13.debug(`response.message: Artifact download failed: ${error2.message}`); + core14.debug(`response.message: Artifact download failed: ${error2.message}`); clearTimeout(timer); reject(error2); }).pipe(unzip_stream_1.default.Extract({ path: directory })).on("close", () => { @@ -111716,7 +111595,7 @@ var require_download_artifact = __commonJS({ if (hashStream) { hashStream.end(); sha256Digest = hashStream.read(); - core13.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`); + core14.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`); } resolve2({ sha256Digest: `sha256:${sha256Digest}` }); }).on("error", (error2) => { @@ -111731,7 +111610,7 @@ var require_download_artifact = __commonJS({ const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path); const api = github2.getOctokit(token); let digestMismatch = false; - core13.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`); + core14.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`); const { headers, status } = yield api.rest.actions.downloadArtifact({ owner: repositoryOwner, repo: repositoryName, @@ -111748,16 +111627,16 @@ var require_download_artifact = __commonJS({ if (!location) { throw new Error(`Unable to redirect to artifact download url`); } - core13.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`); + core14.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`); try { - core13.info(`Starting download of artifact to: ${downloadPath}`); + core14.info(`Starting download of artifact to: ${downloadPath}`); const extractResponse = yield streamExtract(location, downloadPath); - core13.info(`Artifact download completed successfully.`); + core14.info(`Artifact download completed successfully.`); if (options === null || options === void 0 ? void 0 : options.expectedHash) { if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) { digestMismatch = true; - core13.debug(`Computed digest: ${extractResponse.sha256Digest}`); - core13.debug(`Expected digest: ${options.expectedHash}`); + core14.debug(`Computed digest: ${extractResponse.sha256Digest}`); + core14.debug(`Expected digest: ${options.expectedHash}`); } } } catch (error2) { @@ -111784,7 +111663,7 @@ var require_download_artifact = __commonJS({ Are you trying to download from a different run? Try specifying a github-token with \`actions:read\` scope.`); } if (artifacts.length > 1) { - core13.warning("Multiple artifacts found, defaulting to first."); + core14.warning("Multiple artifacts found, defaulting to first."); } const signedReq = { workflowRunBackendId: artifacts[0].workflowRunBackendId, @@ -111792,16 +111671,16 @@ Are you trying to download from a different run? 
Try specifying a github-token w name: artifacts[0].name }; const { signedUrl } = yield artifactClient.GetSignedArtifactURL(signedReq); - core13.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`); + core14.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`); try { - core13.info(`Starting download of artifact to: ${downloadPath}`); + core14.info(`Starting download of artifact to: ${downloadPath}`); const extractResponse = yield streamExtract(signedUrl, downloadPath); - core13.info(`Artifact download completed successfully.`); + core14.info(`Artifact download completed successfully.`); if (options === null || options === void 0 ? void 0 : options.expectedHash) { if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) { digestMismatch = true; - core13.debug(`Computed digest: ${extractResponse.sha256Digest}`); - core13.debug(`Expected digest: ${options.expectedHash}`); + core14.debug(`Computed digest: ${extractResponse.sha256Digest}`); + core14.debug(`Expected digest: ${options.expectedHash}`); } } } catch (error2) { @@ -111814,10 +111693,10 @@ Are you trying to download from a different run? Try specifying a github-token w function resolveOrCreateDirectory(downloadPath = (0, config_1.getGitHubWorkspaceDir)()) { return __awaiter4(this, void 0, void 0, function* () { if (!(yield exists(downloadPath))) { - core13.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`); + core14.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`); yield promises_1.default.mkdir(downloadPath, { recursive: true }); } else { - core13.debug(`Artifact destination folder already exists: ${downloadPath}`); + core14.debug(`Artifact destination folder already exists: ${downloadPath}`); } return downloadPath; }); @@ -111858,7 +111737,7 @@ var require_retry_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRetryOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var defaultMaxRetryNumber = 5; var defaultExemptStatusCodes = [400, 401, 403, 404, 422]; function getRetryOptions(defaultOptions, retries = defaultMaxRetryNumber, exemptStatusCodes = defaultExemptStatusCodes) { @@ -111873,7 +111752,7 @@ var require_retry_options = __commonJS({ retryOptions.doNotRetry = exemptStatusCodes; } const requestOptions = Object.assign(Object.assign({}, defaultOptions.request), { retries }); - core13.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? _a : "octokit default: [400, 401, 403, 404, 422]"})`); + core14.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? 
_a : "octokit default: [400, 401, 403, 404, 422]"})`); return [retryOptions, requestOptions]; } exports2.getRetryOptions = getRetryOptions; @@ -112030,7 +111909,7 @@ var require_get_artifact = __commonJS({ exports2.getArtifactInternal = exports2.getArtifactPublic = void 0; var github_1 = require_github2(); var plugin_retry_1 = require_dist_node25(); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var utils_1 = require_utils13(); var retry_options_1 = require_retry_options(); var plugin_request_log_1 = require_dist_node24(); @@ -112068,7 +111947,7 @@ var require_get_artifact = __commonJS({ let artifact2 = getArtifactResp.data.artifacts[0]; if (getArtifactResp.data.artifacts.length > 1) { artifact2 = getArtifactResp.data.artifacts.sort((a, b) => b.id - a.id)[0]; - core13.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`); + core14.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`); } return { artifact: { @@ -112101,7 +111980,7 @@ var require_get_artifact = __commonJS({ let artifact2 = res.artifacts[0]; if (res.artifacts.length > 1) { artifact2 = res.artifacts.sort((a, b) => Number(b.databaseId) - Number(a.databaseId))[0]; - core13.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.databaseId})`); + core14.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.databaseId})`); } return { artifact: { @@ -114049,7 +113928,7 @@ var require_requestUtils = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientRequest = exports2.retry = void 0; var utils_1 = require_utils14(); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var config_variables_1 = require_config_variables(); function retry3(name, operation, customErrorMessages, maxAttempts) { return __awaiter4(this, void 0, void 0, function* () { @@ -114076,13 +113955,13 @@ var require_requestUtils = __commonJS({ errorMessage = error2.message; } if (!isRetryable) { - core13.info(`${name} - Error is not retryable`); + core14.info(`${name} - Error is not retryable`); if (response) { (0, utils_1.displayHttpDiagnostics)(response); } break; } - core13.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core14.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); yield (0, utils_1.sleep)((0, utils_1.getExponentialRetryTimeInMilliseconds)(attempt)); attempt++; } @@ -114166,7 +114045,7 @@ var require_upload_http_client = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadHttpClient = void 0; var fs2 = __importStar4(require("fs")); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var tmp = __importStar4(require_tmp_promise()); var stream = __importStar4(require("stream")); var utils_1 = require_utils14(); @@ -114231,7 +114110,7 @@ var require_upload_http_client = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const FILE_CONCURRENCY = (0, config_variables_1.getUploadFileConcurrency)(); const MAX_CHUNK_SIZE = (0, config_variables_1.getUploadChunkSize)(); - core13.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); + core14.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); const parameters = []; let continueOnError = true; if 
(options) { @@ -114268,15 +114147,15 @@ var require_upload_http_client = __commonJS({ } const startTime = perf_hooks_1.performance.now(); const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters); - if (core13.isDebug()) { - core13.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); + if (core14.isDebug()) { + core14.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); } uploadFileSize += uploadFileResult.successfulUploadSize; totalFileSize += uploadFileResult.totalSize; if (uploadFileResult.isSuccess === false) { failedItemsToReport.push(currentFileParameters.file); if (!continueOnError) { - core13.error(`aborting artifact upload`); + core14.error(`aborting artifact upload`); abortPendingFileUploads = true; } } @@ -114285,7 +114164,7 @@ var require_upload_http_client = __commonJS({ }))); this.statusReporter.stop(); this.uploadHttpManager.disposeAndReplaceAllClients(); - core13.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); + core14.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); return { uploadSize: uploadFileSize, totalSize: totalFileSize, @@ -114311,16 +114190,16 @@ var require_upload_http_client = __commonJS({ let uploadFileSize = 0; let isGzip = true; if (!isFIFO && totalFileSize < 65536) { - core13.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); + core14.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); const buffer = yield (0, upload_gzip_1.createGZipFileInBuffer)(parameters.file); let openUploadStream; if (totalFileSize < buffer.byteLength) { - core13.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + core14.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); openUploadStream = () => fs2.createReadStream(parameters.file); isGzip = false; uploadFileSize = totalFileSize; } else { - core13.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); + core14.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); openUploadStream = () => { const passThrough = new stream.PassThrough(); passThrough.end(buffer); @@ -114332,7 +114211,7 @@ var require_upload_http_client = __commonJS({ if (!result) { isUploadSuccessful = false; failedChunkSizes += uploadFileSize; - core13.warning(`Aborting upload for ${parameters.file} due to failure`); + core14.warning(`Aborting upload for ${parameters.file} due to failure`); } return { isSuccess: isUploadSuccessful, @@ -114341,16 +114220,16 @@ var require_upload_http_client = __commonJS({ }; } else { const tempFile = yield tmp.file(); - core13.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); + core14.debug(`${parameters.file} is greater than 64k in size. 
Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); uploadFileSize = yield (0, upload_gzip_1.createGZipFileOnDisk)(parameters.file, tempFile.path); let uploadFilePath = tempFile.path; if (!isFIFO && totalFileSize < uploadFileSize) { - core13.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + core14.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); uploadFileSize = totalFileSize; uploadFilePath = parameters.file; isGzip = false; } else { - core13.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); + core14.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); } let abortFileUpload = false; while (offset < uploadFileSize) { @@ -114370,7 +114249,7 @@ var require_upload_http_client = __commonJS({ if (!result) { isUploadSuccessful = false; failedChunkSizes += chunkSize; - core13.warning(`Aborting upload for ${parameters.file} due to failure`); + core14.warning(`Aborting upload for ${parameters.file} due to failure`); abortFileUpload = true; } else { if (uploadFileSize > 8388608) { @@ -114378,7 +114257,7 @@ var require_upload_http_client = __commonJS({ } } } - core13.debug(`deleting temporary gzip file ${tempFile.path}`); + core14.debug(`deleting temporary gzip file ${tempFile.path}`); yield tempFile.cleanup(); return { isSuccess: isUploadSuccessful, @@ -114417,7 +114296,7 @@ var require_upload_http_client = __commonJS({ if (response) { (0, utils_1.displayHttpDiagnostics)(response); } - core13.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); + core14.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); return true; } return false; @@ -114425,14 +114304,14 @@ var require_upload_http_client = __commonJS({ const backOff = (retryAfterValue) => __awaiter4(this, void 0, void 0, function* () { this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core13.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); + core14.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core13.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); + core14.info(`Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); yield (0, utils_1.sleep)(backoffTime); } - core13.info(`Finished backoff for retry #${retryCount}, continuing with upload`); + core14.info(`Finished backoff for retry #${retryCount}, continuing with upload`); return; }); while (retryCount <= retryLimit) { @@ -114440,7 +114319,7 @@ var require_upload_http_client = __commonJS({ try { response = yield uploadChunkRequest(); } catch (error2) { - core13.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); + core14.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); console.log(error2); if (incrementAndCheckRetryLimit()) { return false; @@ -114452,13 +114331,13 @@ var require_upload_http_client = __commonJS({ if ((0, utils_1.isSuccessStatusCode)(response.message.statusCode)) { return true; } else if ((0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core13.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); + core14.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); if (incrementAndCheckRetryLimit(response)) { return false; } (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { - core13.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); + core14.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); (0, utils_1.displayHttpDiagnostics)(response); return false; } @@ -114476,7 +114355,7 @@ var require_upload_http_client = __commonJS({ resourceUrl.searchParams.append("artifactName", artifactName); const parameters = { Size: size }; const data = JSON.stringify(parameters, null, 2); - core13.debug(`URL is ${resourceUrl.toString()}`); + core14.debug(`URL is ${resourceUrl.toString()}`); const client = this.uploadHttpManager.getClient(0); const headers = (0, utils_1.getUploadHeaders)("application/json", false); const customErrorMessages = /* @__PURE__ */ new Map([ @@ -114489,7 +114368,7 @@ var require_upload_http_client = __commonJS({ return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages); yield response.readBody(); - core13.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); + core14.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); }); } }; @@ -114558,7 +114437,7 @@ var require_download_http_client = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DownloadHttpClient = void 0; var fs2 = __importStar4(require("fs")); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var zlib = __importStar4(require("zlib")); var utils_1 = require_utils14(); var url_1 = require("url"); @@ -114612,11 +114491,11 @@ var require_download_http_client = __commonJS({ downloadSingleArtifact(downloadItems) { return __awaiter4(this, void 0, void 0, function* () { const DOWNLOAD_CONCURRENCY = (0, config_variables_1.getDownloadFileConcurrency)(); - core13.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); + core14.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]; let currentFile = 0; let downloadedFiles = 0; - 
core13.info(`Total number of files that will be downloaded: ${downloadItems.length}`); + core14.info(`Total number of files that will be downloaded: ${downloadItems.length}`); this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length); this.statusReporter.start(); yield Promise.all(parallelDownloads.map((index) => __awaiter4(this, void 0, void 0, function* () { @@ -114625,8 +114504,8 @@ var require_download_http_client = __commonJS({ currentFile += 1; const startTime = perf_hooks_1.performance.now(); yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath); - if (core13.isDebug()) { - core13.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); + if (core14.isDebug()) { + core14.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); } this.statusReporter.incrementProcessedCount(); } @@ -114664,19 +114543,19 @@ var require_download_http_client = __commonJS({ } else { this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core13.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); + core14.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core13.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); + core14.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); yield (0, utils_1.sleep)(backoffTime); } - core13.info(`Finished backoff for retry #${retryCount}, continuing with download`); + core14.info(`Finished backoff for retry #${retryCount}, continuing with download`); } }); const isAllBytesReceived = (expected, received) => { if (!expected || !received || process.env["ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION"]) { - core13.info("Skipping download validation."); + core14.info("Skipping download validation."); return true; } return parseInt(expected) === received; @@ -114697,7 +114576,7 @@ var require_download_http_client = __commonJS({ try { response = yield makeDownloadRequest(); } catch (error2) { - core13.info("An error occurred while attempting to download a file"); + core14.info("An error occurred while attempting to download a file"); console.log(error2); yield backOff(); continue; @@ -114717,7 +114596,7 @@ var require_download_http_client = __commonJS({ } } if (forceRetry || (0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core13.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); + core14.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); resetDestinationStream(downloadPath); (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? 
yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { @@ -114739,29 +114618,29 @@ var require_download_http_client = __commonJS({ if (isGzip) { const gunzip = zlib.createGunzip(); response.message.on("error", (error2) => { - core13.info(`An error occurred while attempting to read the response stream`); + core14.info(`An error occurred while attempting to read the response stream`); gunzip.close(); destinationStream.close(); reject(error2); }).pipe(gunzip).on("error", (error2) => { - core13.info(`An error occurred while attempting to decompress the response stream`); + core14.info(`An error occurred while attempting to decompress the response stream`); destinationStream.close(); reject(error2); }).pipe(destinationStream).on("close", () => { resolve2(); }).on("error", (error2) => { - core13.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core14.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error2); }); } else { response.message.on("error", (error2) => { - core13.info(`An error occurred while attempting to read the response stream`); + core14.info(`An error occurred while attempting to read the response stream`); destinationStream.close(); reject(error2); }).pipe(destinationStream).on("close", () => { resolve2(); }).on("error", (error2) => { - core13.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core14.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error2); }); } @@ -114900,7 +114779,7 @@ var require_artifact_client = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultArtifactClient = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var upload_specification_1 = require_upload_specification(); var upload_http_client_1 = require_upload_http_client(); var utils_1 = require_utils14(); @@ -114921,7 +114800,7 @@ var require_artifact_client = __commonJS({ */ uploadArtifact(name, files, rootDirectory, options) { return __awaiter4(this, void 0, void 0, function* () { - core13.info(`Starting artifact upload + core14.info(`Starting artifact upload For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`); (0, path_and_artifact_name_validation_1.checkArtifactName)(name); const uploadSpecification = (0, upload_specification_1.getUploadSpecification)(name, rootDirectory, files); @@ -114933,24 +114812,24 @@ For more detailed logs during the artifact upload process, enable step-debugging }; const uploadHttpClient = new upload_http_client_1.UploadHttpClient(); if (uploadSpecification.length === 0) { - core13.warning(`No files found that can be uploaded`); + core14.warning(`No files found that can be uploaded`); } else { const response = yield uploadHttpClient.createArtifactInFileContainer(name, options); if (!response.fileContainerResourceUrl) { - core13.debug(response.toString()); + core14.debug(response.toString()); throw new Error("No URL provided by the Artifact Service to upload an artifact to"); } - core13.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); - core13.info(`Container for artifact "${name}" successfully created. 
Starting upload of file(s)`); + core14.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); + core14.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`); const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options); - core13.info(`File upload process has finished. Finalizing the artifact upload`); + core14.info(`File upload process has finished. Finalizing the artifact upload`); yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name); if (uploadResult.failedItems.length > 0) { - core13.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); + core14.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); } else { - core13.info(`Artifact has been finalized. All files have been successfully uploaded!`); + core14.info(`Artifact has been finalized. All files have been successfully uploaded!`); } - core13.info(` + core14.info(` The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage @@ -114984,10 +114863,10 @@ Note: The size of downloaded zips can differ significantly from the reported siz path2 = (0, path_1.resolve)(path2); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path2, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); if (downloadSpecification.filesToDownload.length === 0) { - core13.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); + core14.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); - core13.info("Directory structure has been set up for the artifact"); + core14.info("Directory structure has been set up for the artifact"); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); } @@ -115003,7 +114882,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz const response = []; const artifacts = yield downloadHttpClient.listArtifacts(); if (artifacts.count === 0) { - core13.info("Unable to find any artifacts for the associated workflow"); + core14.info("Unable to find any artifacts for the associated workflow"); return response; } if (!path2) { @@ -115015,11 +114894,11 @@ Note: The size of downloaded zips can differ significantly from the reported siz while (downloadedArtifacts < artifacts.count) { const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; downloadedArtifacts += 1; - core13.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); + core14.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path2, true); 
if (downloadSpecification.filesToDownload.length === 0) { - core13.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); + core14.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); @@ -115081,7 +114960,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -115091,15 +114970,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core13.debug(`implicitDescendants '${result.implicitDescendants}'`); + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -115763,7 +115642,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs2 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path2 = __importStar4(require("path")); @@ -115814,7 +115693,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core13.debug(`Search path '${searchPath}'`); + core14.debug(`Search path '${searchPath}'`); try { yield __await4(fs2.promises.lstat(searchPath)); } catch (err) { @@ -115886,7 +115765,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core13.debug(`Broken symlink '${item.path}'`); + core14.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -115902,7 +115781,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -117226,7 +117105,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob2 = __importStar4(require_glob3()); var io6 = __importStar4(require_io()); @@ -117279,7 +117158,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path2.relative(workspace, file).replace(new RegExp(`\\${path2.sep}`, "g"), "/"); - core13.debug(`Matched: ${relativeFile}`); + core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -117309,7 +117188,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core13.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -117320,10 +117199,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core13.debug(err.message); + core14.debug(err.message); } versionOutput = versionOutput.trim(); - core13.debug(versionOutput); + core14.debug(versionOutput); return versionOutput; }); } @@ -117331,7 +117210,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core13.debug(`zstd version: ${version}`); + core14.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -117521,7 +117400,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors4(); var UploadProgress = class { @@ -117563,7 +117442,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -117618,14 +117497,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core13.debug(`BlobClient: 
${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core13.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -117696,7 +117575,7 @@ var require_requestUtils2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants13(); function isSuccessStatusCode(statusCode) { @@ -117757,9 +117636,9 @@ var require_requestUtils2 = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core13.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core13.debug(`${name} - Error is not retryable`); + core14.debug(`${name} - Error is not retryable`); break; } yield sleep(delay); @@ -117864,7 +117743,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -117902,7 +117781,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core13.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -117936,7 +117815,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -117986,7 +117865,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core13.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -117997,7 +117876,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core13.debug("Unable to validate download, no Content-Length header"); + core14.debug("Unable to validate download, no Content-Length header"); } }); } @@ -118117,7 +117996,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core13.debug("Unable to determine content length, downloading file with http-client..."); + core14.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -118197,7 +118076,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -118217,9 +118096,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core13.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -118256,12 +118135,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Download concurrency: ${result.downloadConcurrency}`); - core13.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core13.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core13.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core13.debug(`Lookup only: ${result.lookupOnly}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Download concurrency: ${result.downloadConcurrency}`); + core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core14.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -118441,7 +118320,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs2 = __importStar4(require("fs")); @@ -118459,7 +118338,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core13.debug(`Resource Url: ${url}`); + core14.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -118487,7 +118366,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core13.isDebug()) { + if (core14.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -118500,9 +118379,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core13.setSecret(cacheDownloadUrl); - core13.debug(`Cache Result:`); - core13.debug(JSON.stringify(cacheResult)); + core14.setSecret(cacheDownloadUrl); + core14.debug(`Cache Result:`); + core14.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -118517,10 +118396,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core13.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core13.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -118565,7 +118444,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core13.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -118587,7 +118466,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core13.debug("Awaiting all uploads"); + core14.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -118630,16 +118509,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core13.debug("Upload cache"); + core14.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core13.debug("Commiting cache"); + core14.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield 
commitCache(httpClient, cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core13.info("Cache saved successfully"); + core14.info("Cache saved successfully"); } }); } @@ -119839,7 +119718,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var path2 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -119892,7 +119771,7 @@ var require_cache3 = __commonJS({ function restoreCache4(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -119908,8 +119787,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -119927,19 +119806,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path2.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -119947,16 +119826,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error2.message}`); + core14.error(`Failed to restore: ${error2.message}`); } else { - core13.warning(`Failed to restore: ${error2.message}`); + core14.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -119967,8 +119846,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -119986,30 +119865,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core13.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core14.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core13.info(`Cache hit for restore-key: ${response.matchedKey}`); + core14.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core13.info(`Cache hit for: ${response.matchedKey}`); + core14.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path2.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive path: ${archivePath}`); - core13.debug(`Starting download of archive to: ${archivePath}`); + core14.debug(`Archive path: ${archivePath}`); + core14.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core13.isDebug()) { + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -120017,9 +119896,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error2.message}`); + core14.error(`Failed to restore: ${error2.message}`); } else { - core13.warning(`Failed to restore: ${error2.message}`); + core14.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -120028,7 +119907,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -120037,7 +119916,7 @@ var require_cache3 = __commonJS({ function saveCache4(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -120056,26 +119935,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path2.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -120088,26 +119967,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core13.debug(`Saving Cache (ID: ${cacheId})`); + core14.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core13.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -120120,26 +119999,26 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path2.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } options.archiveSizeBytes = archiveFileSize; - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -120153,10 +120032,10 @@ var require_cache3 = __commonJS({ } signedUploadUrl = response.signedUploadUrl; } catch (error2) { - core13.debug(`Failed to reserve 
cache: ${error2}`); + core14.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core13.debug(`Attempting to upload cache located at: ${archivePath}`); + core14.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -120164,7 +120043,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core13.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); } @@ -120174,19 +120053,19 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core13.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -120394,7 +120273,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -120417,10 +120296,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core13.info(err.message); + core14.info(err.message); } const seconds = this.getSleepAmount(); - core13.info(`Waiting ${seconds} seconds before trying again`); + core14.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -120500,7 +120379,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var io6 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); var fs2 = __importStar4(require("fs")); @@ -120529,8 +120408,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || path2.join(_getTempDirectory(), crypto.randomUUID()); yield io6.mkdirP(path2.dirname(dest)); - core13.debug(`Downloading ${url}`); - core13.debug(`Destination ${dest}`); + 
core14.debug(`Downloading ${url}`); + core14.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -120557,7 +120436,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core13.debug("set auth"); + core14.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -120566,7 +120445,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError(response.message.statusCode); - core13.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core14.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream.pipeline); @@ -120575,16 +120454,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs2.createWriteStream(dest)); - core13.debug("download complete"); + core14.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core13.debug("download failed"); + core14.debug("download failed"); try { yield io6.rmRF(dest); } catch (err) { - core13.debug(`Failed to delete '${dest}'. ${err.message}`); + core14.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -120599,7 +120478,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core13.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core14.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -120649,7 +120528,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core13.debug("Checking tar --version"); + core14.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -120659,7 +120538,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core13.debug(versionOutput.trim()); + core14.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -120667,7 +120546,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core13.isDebug() && !flags.includes("v")) { + if (core14.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -120699,7 +120578,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core13.isDebug()) { + if (core14.isDebug()) { args.push("-v"); } const xarPath = yield io6.which("xar", true); @@ -120744,7 +120623,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core13.debug(`Using pwsh at path: ${pwshPath}`); + core14.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -120764,7 +120643,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io6.which("powershell", true); - core13.debug(`Using powershell at path: ${powershellPath}`); + core14.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -120773,7 +120652,7 @@ var require_tool_cache = __commonJS({ return 
__awaiter4(this, void 0, void 0, function* () { const unzipPath = yield io6.which("unzip", true); const args = [file]; - if (!core13.isDebug()) { + if (!core14.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -120784,8 +120663,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch = arch || os.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch}`); - core13.debug(`source dir: ${sourceDir}`); + core14.debug(`Caching tool ${tool} ${version} ${arch}`); + core14.debug(`source dir: ${sourceDir}`); if (!fs2.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -120803,14 +120682,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch = arch || os.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch}`); - core13.debug(`source file: ${sourceFile}`); + core14.debug(`Caching tool ${tool} ${version} ${arch}`); + core14.debug(`source file: ${sourceFile}`); if (!fs2.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch); const destPath = path2.join(destFolder, targetFile); - core13.debug(`destination file ${destPath}`); + core14.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch); return destFolder; @@ -120834,12 +120713,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path2.join(_getCacheDirectory(), toolName, versionSpec, arch); - core13.debug(`checking cache: ${cachePath}`); + core14.debug(`checking cache: ${cachePath}`); if (fs2.existsSync(cachePath) && fs2.existsSync(`${cachePath}.complete`)) { - core13.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); + core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); toolPath = cachePath; } else { - core13.debug("not found"); + core14.debug("not found"); } } return toolPath; @@ -120870,7 +120749,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core13.debug("set auth"); + core14.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -120891,7 +120770,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core13.debug("Invalid json"); + core14.debug("Invalid json"); } } return releases; @@ -120917,7 +120796,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path2.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); - core13.debug(`destination ${folderPath}`); + core14.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); yield io6.rmRF(markerPath); @@ -120929,19 +120808,19 @@ var require_tool_cache = __commonJS({ const folderPath = path2.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); const markerPath = `${folderPath}.complete`; fs2.writeFileSync(markerPath, ""); - core13.debug("finished caching tool"); + core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - 
core13.debug(`isExplicit: ${c}`); + core14.debug(`isExplicit: ${c}`); const valid3 = semver8.valid(c) != null; - core13.debug(`explicit? ${valid3}`); + core14.debug(`explicit? ${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core13.debug(`evaluating ${versions.length} versions`); + core14.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver8.gt(a, b)) { return 1; @@ -120957,9 +120836,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core13.debug(`matched: ${version}`); + core14.debug(`matched: ${version}`); } else { - core13.debug("match not found"); + core14.debug("match not found"); } return version; } @@ -121568,7 +121447,7 @@ var require_internal_glob_options_helper2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -121580,23 +121459,23 @@ var require_internal_glob_options_helper2 = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core13.debug(`implicitDescendants '${result.implicitDescendants}'`); + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = copy.matchDirectories; - core13.debug(`matchDirectories '${result.matchDirectories}'`); + core14.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core13.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core14.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -122280,7 +122159,7 @@ var require_internal_globber2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs2 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper2()); var path2 = __importStar4(require("path")); @@ -122333,7 +122212,7 @@ var require_internal_globber2 = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core13.debug(`Search path '${searchPath}'`); + core14.debug(`Search path '${searchPath}'`); try { yield __await4(fs2.promises.lstat(searchPath)); } catch (err) { @@ -122408,7 +122287,7 @@ var require_internal_globber2 = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core13.debug(`Broken symlink '${item.path}'`); + core14.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the 
path '${item.path}'. This may indicate a broken symbolic link.`); @@ -122424,7 +122303,7 @@ var require_internal_globber2 = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -122517,7 +122396,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = void 0; var crypto = __importStar4(require("crypto")); - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs2 = __importStar4(require("fs")); var stream = __importStar4(require("stream")); var util = __importStar4(require("util")); @@ -122526,7 +122405,7 @@ var require_internal_hash_files = __commonJS({ var _a, e_1, _b, _c; var _d; return __awaiter4(this, void 0, void 0, function* () { - const writeDelegate = verbose ? core13.info : core13.debug; + const writeDelegate = verbose ? core14.info : core14.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd(); const result = crypto.createHash("sha256"); @@ -122633,20 +122512,74 @@ var require_glob4 = __commonJS({ }); // src/upload-sarif-action-post.ts -var core12 = __toESM(require_core()); +var core13 = __toESM(require_core()); // src/actions-util.ts -var core3 = __toESM(require_core()); +var core4 = __toESM(require_core()); var toolrunner = __toESM(require_toolrunner()); var github = __toESM(require_github()); var io2 = __toESM(require_io()); // src/util.ts -var core2 = __toESM(require_core()); +var core3 = __toESM(require_core()); var exec = __toESM(require_exec()); var io = __toESM(require_io()); var import_del = __toESM(require_del()); -var import_get_folder_size = __toESM(require_get_folder_size()); + +// node_modules/get-folder-size/index.js +var import_node_path = require("node:path"); +async function getFolderSize(itemPath, options) { + return await core(itemPath, options, { errors: true }); +} +getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); +getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); +async function core(rootItemPath, options = {}, returnType = {}) { + const fs2 = options.fs || await import("node:fs/promises"); + let folderSize = 0n; + const foundInos = /* @__PURE__ */ new Set(); + const errors = []; + await processItem(rootItemPath); + async function processItem(itemPath) { + if (options.ignore?.test(itemPath)) return; + const stats = returnType.strict ? await fs2.lstat(itemPath, { bigint: true }) : await fs2.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); + if (typeof stats !== "object") return; + if (!foundInos.has(stats.ino)) { + foundInos.add(stats.ino); + folderSize += stats.size; + } + if (stats.isDirectory()) { + const directoryItems = returnType.strict ? 
await fs2.readdir(itemPath) : await fs2.readdir(itemPath).catch((error2) => errors.push(error2)); + if (typeof directoryItems !== "object") return; + await Promise.all( + directoryItems.map( + (directoryItem) => processItem((0, import_node_path.join)(itemPath, directoryItem)) + ) + ); + } + } + if (!options.bigint) { + if (folderSize > BigInt(Number.MAX_SAFE_INTEGER)) { + const error2 = new RangeError( + "The folder size is too large to return as a Number. You can instruct this package to return a BigInt instead." + ); + if (returnType.strict) { + throw error2; + } + errors.push(error2); + folderSize = Number.MAX_SAFE_INTEGER; + } else { + folderSize = Number(folderSize); + } + } + if (returnType.errors) { + return { + size: folderSize, + errors: errors.length > 0 ? errors : null + }; + } else { + return folderSize; + } +} // node_modules/js-yaml/dist/js-yaml.mjs function isNothing(subject) { @@ -123217,7 +123150,7 @@ var json = failsafe.extend({ float ] }); -var core = json; +var core2 = json; var YAML_DATE_REGEXP = new RegExp( "^([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])$" ); @@ -123431,7 +123364,7 @@ var set = new type("tag:yaml.org,2002:set", { resolve: resolveYamlSet, construct: constructYamlSet }); -var _default = core.extend({ +var _default = core2.extend({ implicit: [ timestamp, merge @@ -125291,7 +125224,7 @@ function checkGitHubVersionInRange(version, logger) { ); } hasBeenWarnedAboutVersion = true; - core2.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true); + core3.exportVariable(CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR, true); } function apiVersionInRange(version, minimumVersion2, maximumVersion2) { if (!semver.satisfies(version, `>=${minimumVersion2}`)) { @@ -125321,14 +125254,14 @@ function getErrorMessage(error2) { // src/actions-util.ts var pkg = require_package(); var getRequiredInput = function(name) { - const value = core3.getInput(name); + const value = core4.getInput(name); if (!value) { throw new ConfigurationError(`Input required and not supplied: ${name}`); } return value; }; var getOptionalInput = function(name) { - const value = core3.getInput(name); + const value = core4.getInput(name); return value.length > 0 ? 
value : void 0; }; function getTemporaryDirectory() { @@ -125340,7 +125273,7 @@ function getActionVersion() { } var persistedInputsKey = "persisted_inputs"; var restoreInputs = function() { - const persistedInputs = core3.getState(persistedInputsKey); + const persistedInputs = core4.getState(persistedInputsKey); if (persistedInputs) { for (const [name, value] of JSON.parse(persistedInputs)) { process.env[name] = value; @@ -125349,7 +125282,7 @@ var restoreInputs = function() { }; // src/api-client.ts -var core4 = __toESM(require_core()); +var core5 = __toESM(require_core()); var githubUtils = __toESM(require_utils4()); var retry = __toESM(require_dist_node15()); var import_console_log_level = __toESM(require_console_log_level()); @@ -125405,7 +125338,7 @@ var fs = __toESM(require("fs")); var path = __toESM(require("path")); var artifact = __toESM(require_artifact2()); var artifactLegacy = __toESM(require_artifact_client2()); -var core11 = __toESM(require_core()); +var core12 = __toESM(require_core()); var import_archiver = __toESM(require_archiver()); var import_del3 = __toESM(require_del()); @@ -125414,10 +125347,10 @@ var io5 = __toESM(require_io()); var import_del2 = __toESM(require_del()); // src/autobuild.ts -var core10 = __toESM(require_core()); +var core11 = __toESM(require_core()); // src/codeql.ts -var core9 = __toESM(require_core()); +var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); // src/cli-errors.ts @@ -125565,7 +125498,7 @@ var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { var supportedAnalysisKinds = new Set(Object.values(AnalysisKind)); // src/caching-utils.ts -var core5 = __toESM(require_core()); +var core6 = __toESM(require_core()); // src/feature-flags.ts var semver3 = __toESM(require_semver2()); @@ -125574,21 +125507,21 @@ var semver3 = __toESM(require_semver2()); var actionsCache = __toESM(require_cache3()); // src/git-utils.ts -var core6 = __toESM(require_core()); +var core7 = __toESM(require_core()); var toolrunner2 = __toESM(require_toolrunner()); var io3 = __toESM(require_io()); // src/logging.ts -var core7 = __toESM(require_core()); +var core8 = __toESM(require_core()); function getActionsLogger() { - return core7; + return core8; } function withGroup(groupName, f) { - core7.startGroup(groupName); + core8.startGroup(groupName); try { return f(); } finally { - core7.endGroup(); + core8.endGroup(); } } @@ -125821,7 +125754,7 @@ var toolcache = __toESM(require_tool_cache()); var semver5 = __toESM(require_semver2()); // src/tools-download.ts -var core8 = __toESM(require_core()); +var core9 = __toESM(require_core()); var import_http_client = __toESM(require_lib()); var toolcache2 = __toESM(require_tool_cache()); var import_follow_redirects = __toESM(require_follow_redirects()); @@ -125879,7 +125812,7 @@ async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghV } const uploadSupported = isSafeArtifactUpload(codeQlVersion); if (!uploadSupported) { - core11.info( + core12.info( `Skipping debug artifact upload because the current CLI does not support safe upload. Please upgrade to CLI v${SafeArtifactUploadVersion} or later.` ); return "upload-not-supported"; @@ -125893,7 +125826,7 @@ async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghV ).sort()) suffix += `-${matrixVal}`; } catch { - core11.info( + core12.info( "Could not parse user-specified `matrix` input into JSON. The debug artifact will not be named with the user's `matrix` input." 
); } @@ -125911,7 +125844,7 @@ async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghV ); return "upload-successful"; } catch (e) { - core11.warning(`Failed to upload debug artifacts: ${e}`); + core12.warning(`Failed to upload debug artifacts: ${e}`); return "upload-failed"; } } @@ -125938,7 +125871,7 @@ async function runWrapper() { checkGitHubVersionInRange(gitHubVersion, logger); if (process.env["CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */] !== "true") { if (gitHubVersion.type === void 0) { - core12.warning( + core13.warning( `Did not upload debug artifacts because cannot determine the GitHub variant running.` ); return; @@ -125955,7 +125888,7 @@ async function runWrapper() { ); } } catch (error2) { - core12.setFailed( + core13.setFailed( `upload-sarif post-action step failed: ${getErrorMessage(error2)}` ); } diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index 414601a7a..90d73b080 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -34336,127 +34336,6 @@ var require_del = __commonJS({ } }); -// node_modules/tiny-each-async/index.js -var require_tiny_each_async = __commonJS({ - "node_modules/tiny-each-async/index.js"(exports2, module2) { - "use strict"; - module2.exports = function eachAsync(arr, parallelLimit, iteratorFn, cb) { - var pending = 0; - var index = 0; - var lastIndex = arr.length - 1; - var called = false; - var limit; - var callback; - var iterate; - if (typeof parallelLimit === "number") { - limit = parallelLimit; - iterate = iteratorFn; - callback = cb || function noop() { - }; - } else { - iterate = parallelLimit; - callback = iteratorFn || function noop() { - }; - limit = arr.length; - } - if (!arr.length) { - return callback(); - } - var iteratorLength = iterate.length; - var shouldCallNextIterator = function shouldCallNextIterator2() { - return !called && pending < limit && index < lastIndex; - }; - var iteratorCallback = function iteratorCallback2(err) { - if (called) { - return; - } - pending--; - if (err || index === lastIndex && !pending) { - called = true; - callback(err); - } else if (shouldCallNextIterator()) { - processIterator(++index); - } - }; - var processIterator = function processIterator2() { - pending++; - var args = iteratorLength === 2 ? [arr[index], iteratorCallback] : [arr[index], index, iteratorCallback]; - iterate.apply(null, args); - if (shouldCallNextIterator()) { - processIterator2(++index); - } - }; - processIterator(); - }; - } -}); - -// node_modules/get-folder-size/index.js -var require_get_folder_size = __commonJS({ - "node_modules/get-folder-size/index.js"(exports2, module2) { - "use strict"; - var fs13 = require("fs"); - var path12 = require("path"); - var eachAsync = require_tiny_each_async(); - function readSizeRecursive(seen, item, ignoreRegEx, callback) { - let cb; - let ignoreRegExp; - if (!callback) { - cb = ignoreRegEx; - ignoreRegExp = null; - } else { - cb = callback; - ignoreRegExp = ignoreRegEx; - } - fs13.lstat(item, function lstat(e, stats) { - let total = !e ? 
stats.size || 0 : 0; - if (stats) { - if (seen.has(stats.ino)) { - return cb(null, 0); - } - seen.add(stats.ino); - } - if (!e && stats.isDirectory()) { - fs13.readdir(item, (err, list) => { - if (err) { - return cb(err); - } - eachAsync( - list, - 5e3, - (dirItem, next) => { - readSizeRecursive( - seen, - path12.join(item, dirItem), - ignoreRegExp, - (error2, size) => { - if (!error2) { - total += size; - } - next(error2); - } - ); - }, - (finalErr) => { - cb(finalErr, total); - } - ); - }); - } else { - if (ignoreRegExp && ignoreRegExp.test(item)) { - total = 0; - } - cb(e, total); - } - }); - } - module2.exports = (...args) => { - args.unshift(/* @__PURE__ */ new Set()); - return readSizeRecursive(...args); - }; - } -}); - // node_modules/semver/internal/constants.js var require_constants9 = __commonJS({ "node_modules/semver/internal/constants.js"(exports2, module2) { @@ -36412,7 +36291,7 @@ var require_package = __commonJS({ "fast-deep-equal": "^3.1.3", "file-url": "^3.0.0", "follow-redirects": "^1.15.11", - "get-folder-size": "^2.0.1", + "get-folder-size": "^5.0.0", "js-yaml": "^4.1.0", jsonschema: "1.4.1", long: "^5.3.2", @@ -38076,7 +37955,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -38086,15 +37965,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core13.debug(`implicitDescendants '${result.implicitDescendants}'`); + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -38758,7 +38637,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs13 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path12 = __importStar4(require("path")); @@ -38809,7 +38688,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core13.debug(`Search path '${searchPath}'`); + core14.debug(`Search path '${searchPath}'`); try { yield __await4(fs13.promises.lstat(searchPath)); } catch (err) { @@ -38881,7 +38760,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core13.debug(`Broken symlink '${item.path}'`); + core14.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -38897,7 +38776,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -40221,7 +40100,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob = __importStar4(require_glob2()); var io6 = __importStar4(require_io()); @@ -40274,7 +40153,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path12.relative(workspace, file).replace(new RegExp(`\\${path12.sep}`, "g"), "/"); - core13.debug(`Matched: ${relativeFile}`); + core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -40304,7 +40183,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core13.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -40315,10 +40194,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core13.debug(err.message); + core14.debug(err.message); } versionOutput = versionOutput.trim(); - core13.debug(versionOutput); + core14.debug(versionOutput); return versionOutput; }); } @@ -40326,7 +40205,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core13.debug(`zstd version: ${version}`); + core14.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -75638,7 +75517,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -75680,7 +75559,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -75735,14 +75614,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core13.debug(`BlobClient: 
${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core13.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -75813,7 +75692,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -75874,9 +75753,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core13.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core13.debug(`${name} - Error is not retryable`); + core14.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -75981,7 +75860,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -76019,7 +75898,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core13.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -76053,7 +75932,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -76103,7 +75982,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core13.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -76114,7 +75993,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core13.debug("Unable to validate download, no Content-Length header"); + core14.debug("Unable to validate download, no Content-Length header"); } }); } @@ -76234,7 +76113,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core13.debug("Unable to determine content length, downloading file with http-client..."); + core14.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -76314,7 +76193,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -76334,9 +76213,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core13.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -76373,12 +76252,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Download concurrency: ${result.downloadConcurrency}`); - core13.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core13.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core13.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core13.debug(`Lookup only: ${result.lookupOnly}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Download concurrency: ${result.downloadConcurrency}`); + core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core14.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -76558,7 +76437,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs13 = __importStar4(require("fs")); @@ -76576,7 +76455,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core13.debug(`Resource Url: ${url}`); + core14.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -76604,7 +76483,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core13.isDebug()) { + if (core14.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -76617,9 +76496,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core13.setSecret(cacheDownloadUrl); - core13.debug(`Cache Result:`); - core13.debug(JSON.stringify(cacheResult)); + core14.setSecret(cacheDownloadUrl); + core14.debug(`Cache Result:`); + core14.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -76634,10 +76513,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core13.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core13.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -76682,7 +76561,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core13.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -76704,7 +76583,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core13.debug("Awaiting all uploads"); + core14.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -76747,16 +76626,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core13.debug("Upload cache"); + core14.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core13.debug("Commiting cache"); + core14.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core13.info("Cache saved successfully"); + core14.info("Cache saved successfully"); } }); } @@ -82182,7 +82061,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var path12 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -82235,7 +82114,7 @@ var require_cache3 = __commonJS({ function restoreCache3(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -82251,8 +82130,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -82270,19 +82149,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path12.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -82290,16 +82169,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error2.message}`); + core14.error(`Failed to restore: ${error2.message}`); } else { - core13.warning(`Failed to restore: ${error2.message}`); + core14.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -82310,8 +82189,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -82329,30 +82208,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core13.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core14.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core13.info(`Cache hit for restore-key: ${response.matchedKey}`); + core14.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core13.info(`Cache hit for: ${response.matchedKey}`); + core14.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path12.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive path: ${archivePath}`); - core13.debug(`Starting download of archive to: ${archivePath}`); + core14.debug(`Archive path: ${archivePath}`); + core14.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core13.isDebug()) { + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -82360,9 +82239,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error2.message}`); + core14.error(`Failed to restore: ${error2.message}`); } else { - core13.warning(`Failed to restore: ${error2.message}`); + core14.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -82371,7 +82250,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -82380,7 +82259,7 @@ var require_cache3 = __commonJS({ function saveCache3(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -82399,26 +82278,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path12.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -82431,26 +82310,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core13.debug(`Saving Cache (ID: ${cacheId})`); + core14.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core13.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -82463,26 +82342,26 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path12.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } options.archiveSizeBytes = archiveFileSize; - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -82496,10 +82375,10 @@ var require_cache3 = __commonJS({ } signedUploadUrl = response.signedUploadUrl; } catch (error2) { - core13.debug(`Failed to reserve cache: 
${error2}`); + core14.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core13.debug(`Attempting to upload cache located at: ${archivePath}`); + core14.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -82507,7 +82386,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core13.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); } @@ -82517,19 +82396,19 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core13.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -84060,7 +83939,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -84083,10 +83962,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core13.info(err.message); + core14.info(err.message); } const seconds = this.getSleepAmount(); - core13.info(`Waiting ${seconds} seconds before trying again`); + core14.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -84166,7 +84045,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core13 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var io6 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); var fs13 = __importStar4(require("fs")); @@ -84195,8 +84074,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || path12.join(_getTempDirectory(), crypto.randomUUID()); yield io6.mkdirP(path12.dirname(dest)); - core13.debug(`Downloading ${url}`); - core13.debug(`Destination ${dest}`); + core14.debug(`Downloading 
${url}`); + core14.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -84223,7 +84102,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core13.debug("set auth"); + core14.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -84232,7 +84111,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError(response.message.statusCode); - core13.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core14.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -84241,16 +84120,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs13.createWriteStream(dest)); - core13.debug("download complete"); + core14.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core13.debug("download failed"); + core14.debug("download failed"); try { yield io6.rmRF(dest); } catch (err) { - core13.debug(`Failed to delete '${dest}'. ${err.message}`); + core14.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -84265,7 +84144,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core13.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core14.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -84315,7 +84194,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core13.debug("Checking tar --version"); + core14.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -84325,7 +84204,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core13.debug(versionOutput.trim()); + core14.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -84333,7 +84212,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core13.isDebug() && !flags.includes("v")) { + if (core14.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -84365,7 +84244,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core13.isDebug()) { + if (core14.isDebug()) { args.push("-v"); } const xarPath = yield io6.which("xar", true); @@ -84410,7 +84289,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core13.debug(`Using pwsh at path: ${pwshPath}`); + core14.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -84430,7 +84309,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io6.which("powershell", true); - core13.debug(`Using powershell at path: ${powershellPath}`); + core14.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -84439,7 +84318,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const 
unzipPath = yield io6.which("unzip", true); const args = [file]; - if (!core13.isDebug()) { + if (!core14.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -84450,8 +84329,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os3.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch2}`); - core13.debug(`source dir: ${sourceDir}`); + core14.debug(`Caching tool ${tool} ${version} ${arch2}`); + core14.debug(`source dir: ${sourceDir}`); if (!fs13.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -84469,14 +84348,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os3.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch2}`); - core13.debug(`source file: ${sourceFile}`); + core14.debug(`Caching tool ${tool} ${version} ${arch2}`); + core14.debug(`source file: ${sourceFile}`); if (!fs13.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path12.join(destFolder, targetFile); - core13.debug(`destination file ${destPath}`); + core14.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -84500,12 +84379,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path12.join(_getCacheDirectory(), toolName, versionSpec, arch2); - core13.debug(`checking cache: ${cachePath}`); + core14.debug(`checking cache: ${cachePath}`); if (fs13.existsSync(cachePath) && fs13.existsSync(`${cachePath}.complete`)) { - core13.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core13.debug("not found"); + core14.debug("not found"); } } return toolPath; @@ -84536,7 +84415,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core13.debug("set auth"); + core14.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -84557,7 +84436,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core13.debug("Invalid json"); + core14.debug("Invalid json"); } } return releases; @@ -84583,7 +84462,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path12.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); - core13.debug(`destination ${folderPath}`); + core14.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); yield io6.rmRF(markerPath); @@ -84595,19 +84474,19 @@ var require_tool_cache = __commonJS({ const folderPath = path12.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs13.writeFileSync(markerPath, ""); - core13.debug("finished caching tool"); + core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - core13.debug(`isExplicit: ${c}`); + core14.debug(`isExplicit: 
${c}`); const valid3 = semver8.valid(c) != null; - core13.debug(`explicit? ${valid3}`); + core14.debug(`explicit? ${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core13.debug(`evaluating ${versions.length} versions`); + core14.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver8.gt(a, b)) { return 1; @@ -84623,9 +84502,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core13.debug(`matched: ${version}`); + core14.debug(`matched: ${version}`); } else { - core13.debug("match not found"); + core14.debug("match not found"); } return version; } @@ -88099,19 +87978,19 @@ var require_sarif_schema_2_1_0 = __commonJS({ // src/upload-sarif-action.ts var fs12 = __toESM(require("fs")); -var core12 = __toESM(require_core()); +var core13 = __toESM(require_core()); // src/actions-util.ts var fs = __toESM(require("fs")); var path2 = __toESM(require("path")); -var core3 = __toESM(require_core()); +var core4 = __toESM(require_core()); var toolrunner = __toESM(require_toolrunner()); var github = __toESM(require_github()); var io2 = __toESM(require_io()); // src/util.ts var path = __toESM(require("path")); -var core2 = __toESM(require_core()); +var core3 = __toESM(require_core()); var exec = __toESM(require_exec()); var io = __toESM(require_io()); @@ -88252,7 +88131,61 @@ function checkDiskSpace(directoryPath, dependencies = { // src/util.ts var import_del = __toESM(require_del()); -var import_get_folder_size = __toESM(require_get_folder_size()); + +// node_modules/get-folder-size/index.js +var import_node_path2 = require("node:path"); +async function getFolderSize(itemPath, options) { + return await core(itemPath, options, { errors: true }); +} +getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); +getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); +async function core(rootItemPath, options = {}, returnType = {}) { + const fs13 = options.fs || await import("node:fs/promises"); + let folderSize = 0n; + const foundInos = /* @__PURE__ */ new Set(); + const errors = []; + await processItem(rootItemPath); + async function processItem(itemPath) { + if (options.ignore?.test(itemPath)) return; + const stats = returnType.strict ? await fs13.lstat(itemPath, { bigint: true }) : await fs13.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); + if (typeof stats !== "object") return; + if (!foundInos.has(stats.ino)) { + foundInos.add(stats.ino); + folderSize += stats.size; + } + if (stats.isDirectory()) { + const directoryItems = returnType.strict ? await fs13.readdir(itemPath) : await fs13.readdir(itemPath).catch((error2) => errors.push(error2)); + if (typeof directoryItems !== "object") return; + await Promise.all( + directoryItems.map( + (directoryItem) => processItem((0, import_node_path2.join)(itemPath, directoryItem)) + ) + ); + } + } + if (!options.bigint) { + if (folderSize > BigInt(Number.MAX_SAFE_INTEGER)) { + const error2 = new RangeError( + "The folder size is too large to return as a Number. You can instruct this package to return a BigInt instead." + ); + if (returnType.strict) { + throw error2; + } + errors.push(error2); + folderSize = Number.MAX_SAFE_INTEGER; + } else { + folderSize = Number(folderSize); + } + } + if (returnType.errors) { + return { + size: folderSize, + errors: errors.length > 0 ? 
errors : null + }; + } else { + return folderSize; + } +} // node_modules/js-yaml/dist/js-yaml.mjs function isNothing(subject) { @@ -88823,7 +88756,7 @@ var json = failsafe.extend({ float ] }); -var core = json; +var core2 = json; var YAML_DATE_REGEXP = new RegExp( "^([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])$" ); @@ -89037,7 +88970,7 @@ var set = new type("tag:yaml.org,2002:set", { resolve: resolveYamlSet, construct: constructYamlSet }); -var _default = core.extend({ +var _default = core2.extend({ implicit: [ timestamp, merge @@ -90909,11 +90842,11 @@ function assertNever(value) { throw new ExhaustivityCheckingError(value); } function initializeEnvironment(version) { - core2.exportVariable("CODEQL_ACTION_FEATURE_MULTI_LANGUAGE" /* FEATURE_MULTI_LANGUAGE */, "false"); - core2.exportVariable("CODEQL_ACTION_FEATURE_SANDWICH" /* FEATURE_SANDWICH */, "false"); - core2.exportVariable("CODEQL_ACTION_FEATURE_SARIF_COMBINE" /* FEATURE_SARIF_COMBINE */, "true"); - core2.exportVariable("CODEQL_ACTION_FEATURE_WILL_UPLOAD" /* FEATURE_WILL_UPLOAD */, "true"); - core2.exportVariable("CODEQL_ACTION_VERSION" /* VERSION */, version); + core3.exportVariable("CODEQL_ACTION_FEATURE_MULTI_LANGUAGE" /* FEATURE_MULTI_LANGUAGE */, "false"); + core3.exportVariable("CODEQL_ACTION_FEATURE_SANDWICH" /* FEATURE_SANDWICH */, "false"); + core3.exportVariable("CODEQL_ACTION_FEATURE_SARIF_COMBINE" /* FEATURE_SARIF_COMBINE */, "true"); + core3.exportVariable("CODEQL_ACTION_FEATURE_WILL_UPLOAD" /* FEATURE_WILL_UPLOAD */, "true"); + core3.exportVariable("CODEQL_ACTION_VERSION" /* VERSION */, version); } function getRequiredEnvParam(paramName) { const value = process.env[paramName]; @@ -90994,7 +90927,7 @@ async function checkDiskUsage(logger) { } else { logger.debug(message); } - core2.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true"); + core3.exportVariable("CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE" /* HAS_WARNED_ABOUT_DISK_SPACE */, "true"); } return { numAvailableBytes: diskUsage.free, @@ -91014,10 +90947,10 @@ function checkActionVersion(version, githubVersion) { semver.coerce(githubVersion.version) ?? "0.0.0", ">=3.11" )) { - core2.error( + core3.error( "CodeQL Action major versions v1 and v2 have been deprecated. Please update all occurrences of the CodeQL Action in your workflow files to v3. For more information, see https://github.blog/changelog/2025-01-10-code-scanning-codeql-action-v2-is-now-deprecated/" ); - core2.exportVariable("CODEQL_ACTION_DID_LOG_VERSION_DEPRECATION" /* LOG_VERSION_DEPRECATION */, "true"); + core3.exportVariable("CODEQL_ACTION_DID_LOG_VERSION_DEPRECATION" /* LOG_VERSION_DEPRECATION */, "true"); } } } @@ -91042,13 +90975,13 @@ async function checkSipEnablement(logger) { if (sipStatusOutput.stdout.includes( "System Integrity Protection status: enabled." )) { - core2.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true"); + core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "true"); return true; } if (sipStatusOutput.stdout.includes( "System Integrity Protection status: disabled." 
)) { - core2.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false"); + core3.exportVariable("CODEQL_ACTION_IS_SIP_ENABLED" /* IS_SIP_ENABLED */, "false"); return false; } } @@ -91095,14 +91028,14 @@ async function asyncSome(array, predicate) { // src/actions-util.ts var pkg = require_package(); var getRequiredInput = function(name) { - const value = core3.getInput(name); + const value = core4.getInput(name); if (!value) { throw new ConfigurationError(`Input required and not supplied: ${name}`); } return value; }; var getOptionalInput = function(name) { - const value = core3.getInput(name); + const value = core4.getInput(name); return value.length > 0 ? value : void 0; }; function getTemporaryDirectory() { @@ -91231,7 +91164,7 @@ var persistInputs = function() { const inputEnvironmentVariables = Object.entries(process.env).filter( ([name]) => name.startsWith("INPUT_") ); - core3.saveState(persistedInputsKey, JSON.stringify(inputEnvironmentVariables)); + core4.saveState(persistedInputsKey, JSON.stringify(inputEnvironmentVariables)); }; var qualityCategoryMapping = { "c#": "csharp", @@ -91259,7 +91192,7 @@ function fixCodeQualityCategory(logger, category) { } // src/api-client.ts -var core4 = __toESM(require_core()); +var core5 = __toESM(require_core()); var githubUtils = __toESM(require_utils4()); var retry = __toESM(require_dist_node15()); var import_console_log_level = __toESM(require_console_log_level()); @@ -91364,7 +91297,7 @@ async function getAnalysisKey() { const workflowPath = await getWorkflowRelativePath(); const jobName = getRequiredEnvParam("GITHUB_JOB"); analysisKey = `${workflowPath}:${jobName}`; - core4.exportVariable(analysisKeyEnvVar, analysisKey); + core5.exportVariable(analysisKeyEnvVar, analysisKey); return analysisKey; } function computeAutomationID(analysis_key, environment) { @@ -91409,13 +91342,13 @@ var path3 = __toESM(require("path")); var actionsCache = __toESM(require_cache3()); // src/git-utils.ts -var core5 = __toESM(require_core()); +var core6 = __toESM(require_core()); var toolrunner2 = __toESM(require_toolrunner()); var io3 = __toESM(require_io()); var runGitCommand = async function(workingDirectory, args, customErrorMessage) { let stdout = ""; let stderr = ""; - core5.debug(`Running git command: git ${args.join(" ")}`); + core6.debug(`Running git command: git ${args.join(" ")}`); try { await new toolrunner2.ToolRunner(await io3.which("git", true), args, { silent: true, @@ -91435,7 +91368,7 @@ var runGitCommand = async function(workingDirectory, args, customErrorMessage) { if (stderr.includes("not a git repository")) { reason = "The checkout path provided to the action does not appear to be a git repository."; } - core5.info(`git call failed. ${customErrorMessage} Error: ${reason}`); + core6.info(`git call failed. 
${customErrorMessage} Error: ${reason}`); throw error2; } }; @@ -91580,7 +91513,7 @@ async function getRef() { ) !== head; if (hasChangedRef) { const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head"); - core5.debug( + core6.debug( `No longer on merge commit, rewriting ref from ${ref} to ${newRef}.` ); return newRef; @@ -91606,9 +91539,9 @@ async function isAnalyzingDefaultBranch() { } // src/logging.ts -var core6 = __toESM(require_core()); +var core7 = __toESM(require_core()); function getActionsLogger() { - return core6; + return core7; } function formatDuration(durationMs) { if (durationMs < 1e3) { @@ -92118,7 +92051,7 @@ var GitHubFeatureFlags = class { // src/status-report.ts var os = __toESM(require("os")); -var core7 = __toESM(require_core()); +var core8 = __toESM(require_core()); function isFirstPartyAnalysis(actionName) { if (actionName !== "upload-sarif" /* UploadSarif */) { return true; @@ -92137,12 +92070,12 @@ function getActionsStatus(error2, otherFailureCause) { } function setJobStatusIfUnsuccessful(actionStatus) { if (actionStatus === "user-error") { - core7.exportVariable( + core8.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); } else if (actionStatus === "failure" || actionStatus === "aborted") { - core7.exportVariable( + core8.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_FAILURE" /* FailureStatus */ ); @@ -92161,14 +92094,14 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; if (workflowStartedAt === void 0) { workflowStartedAt = actionStartedAt.toISOString(); - core7.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + core8.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); } const runnerOs = getRequiredEnvParam("RUNNER_OS"); const codeQlCliVersion = getCachedCodeQlVersion(); const actionRef = process.env["GITHUB_ACTION_REF"] || ""; const testingEnvironment = getTestingEnvironment(); if (testingEnvironment) { - core7.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + core8.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); } const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; const statusReport = { @@ -92246,9 +92179,9 @@ var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scan async function sendStatusReport(statusReport) { setJobStatusIfUnsuccessful(statusReport.status); const statusReportJSON = JSON.stringify(statusReport); - core7.debug(`Sending status report: ${statusReportJSON}`); + core8.debug(`Sending status report: ${statusReportJSON}`); if (isInTestMode()) { - core7.debug("In test mode. Status reports are not uploaded."); + core8.debug("In test mode. Status reports are not uploaded."); return; } const nwo = getRepositoryNwo(); @@ -92267,26 +92200,26 @@ async function sendStatusReport(statusReport) { switch (e.status) { case 403: if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core7.warning( + core8.warning( `Workflows triggered by Dependabot on the "push" event run with read-only access. 
Uploading Code Scanning results requires write access. To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` ); } else { - core7.warning(e.message); + core8.warning(e.message); } return; case 404: - core7.warning(e.message); + core8.warning(e.message); return; case 422: if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core7.debug(INCOMPATIBLE_MSG); + core8.debug(INCOMPATIBLE_MSG); } else { - core7.debug(OUT_OF_DATE_MSG); + core8.debug(OUT_OF_DATE_MSG); } return; } } - core7.warning( + core8.warning( `An unexpected error occurred when sending code scanning status report: ${getErrorMessage( e )}` @@ -92298,14 +92231,14 @@ async function sendStatusReport(statusReport) { var fs11 = __toESM(require("fs")); var path11 = __toESM(require("path")); var import_zlib = __toESM(require("zlib")); -var core11 = __toESM(require_core()); +var core12 = __toESM(require_core()); var import_file_url = __toESM(require_file_url()); var jsonschema = __toESM(require_lib2()); // src/codeql.ts var fs9 = __toESM(require("fs")); var path9 = __toESM(require("path")); -var core10 = __toESM(require_core()); +var core11 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); // src/cli-errors.ts @@ -92559,7 +92492,7 @@ var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { var supportedAnalysisKinds = new Set(Object.values(AnalysisKind)); // src/caching-utils.ts -var core8 = __toESM(require_core()); +var core9 = __toESM(require_core()); // src/diff-informed-analysis-utils.ts var fs4 = __toESM(require("fs")); @@ -92906,7 +92839,7 @@ var fs7 = __toESM(require("fs")); var os2 = __toESM(require("os")); var path7 = __toESM(require("path")); var import_perf_hooks = require("perf_hooks"); -var core9 = __toESM(require_core()); +var core10 = __toESM(require_core()); var import_http_client = __toESM(require_lib()); var toolcache2 = __toESM(require_tool_cache()); var import_follow_redirects = __toESM(require_follow_redirects()); @@ -92960,10 +92893,10 @@ async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorizat }; } } catch (e) { - core9.warning( + core10.warning( `Failed to download and extract CodeQL bundle using streaming with error: ${getErrorMessage(e)}` ); - core9.warning(`Falling back to downloading the bundle before extracting.`); + core10.warning(`Falling back to downloading the bundle before extracting.`); await cleanUpGlob(dest, "CodeQL bundle", logger); } const toolsDownloadStart = import_perf_hooks.performance.now(); @@ -94011,12 +93944,12 @@ ${output}` ); } else if (checkVersion && process.env["CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */] !== "true" && !await codeQlVersionAtLeast(codeql, CODEQL_NEXT_MINIMUM_VERSION)) { const result = await codeql.getVersion(); - core10.warning( + core11.warning( `CodeQL CLI version ${result.version} was discontinued on ${GHES_MOST_RECENT_DEPRECATION_DATE} alongside GitHub Enterprise Server ${GHES_VERSION_MOST_RECENTLY_DEPRECATED} and will not be supported by the next minor release of the CodeQL Action. Please update to CodeQL CLI version ${CODEQL_NEXT_MINIMUM_VERSION} or later. 
For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version. Alternatively, if you want to continue using CodeQL CLI version ${result.version}, you can replace 'github/codeql-action/*@v${getActionVersion().split(".")[0]}' by 'github/codeql-action/*@v${getActionVersion()}' in your code scanning workflow to continue using this version of the CodeQL Action.` ); - core10.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); + core11.exportVariable("CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING" /* SUPPRESS_DEPRECATED_SOON_WARNING */, "true"); } return codeql; } @@ -95374,7 +95307,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. ${deprecationMoreInformationMessage}` ); - core11.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core12.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -95424,7 +95357,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple CodeQL runs with the same category is deprecated ${deprecationWarningMessage} for CodeQL CLI 2.16.6 and earlier. Please update your CodeQL CLI version or update your workflow to set a distinct category for each CodeQL run. ${deprecationMoreInformationMessage}` ); - core11.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core12.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -95489,13 +95422,13 @@ async function uploadPayload(payload, repositoryNwo, logger, target) { if (isHTTPError(e)) { switch (e.status) { case 403: - core11.warning(e.message || GENERIC_403_MSG); + core12.warning(e.message || GENERIC_403_MSG); break; case 404: - core11.warning(e.message || GENERIC_404_MSG); + core12.warning(e.message || GENERIC_404_MSG); break; default: - core11.warning(e.message); + core12.warning(e.message); break; } } @@ -95841,7 +95774,7 @@ function validateUniqueCategory(sarif, sentinelPrefix = CodeScanningTarget.senti `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})` ); } - core11.exportVariable(sentinelEnvVar, sentinelEnvVar); + core12.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { @@ -95937,7 +95870,7 @@ async function run() { logger, CodeScanningTarget ); - core12.setOutput("sarif-id", uploadResult.sarifID); + core13.setOutput("sarif-id", uploadResult.sarifID); if (fs12.lstatSync(sarifPath).isDirectory()) { const qualitySarifFiles = findSarifFilesInDir( sarifPath, @@ -95955,7 +95888,7 @@ async function run() { } } if (isInTestMode()) { - core12.debug("In test mode. Waiting for processing is disabled."); + core13.debug("In test mode. 
Waiting for processing is disabled.");
     } else if (getRequiredInput("wait-for-processing") === "true") {
       await waitForProcessing(
         getRepositoryNwo(),
         uploadResult.sarifID,
         logger
       );
     }
   } catch (unwrappedError) {
     const error2 = isThirdPartyAnalysis("upload-sarif" /* UploadSarif */) && unwrappedError instanceof InvalidSarifUploadError ? new ConfigurationError(unwrappedError.message) : wrapError(unwrappedError);
     const message = error2.message;
-    core12.setFailed(message);
+    core13.setFailed(message);
     const errorStatusReportBase = await createStatusReportBase(
       "upload-sarif" /* UploadSarif */,
       getActionsStatus(error2),
       startedAt,
       config,
       await checkDiskUsage(logger),
       logger,
       message,
       error2.stack
     );
     if (errorStatusReportBase !== void 0) {
       await sendStatusReport(errorStatusReportBase);
     }
     return;
   }
 }
 async function runWrapper() {
   try {
     await run();
   } catch (error2) {
-    core12.setFailed(
+    core13.setFailed(
       `codeql/upload-sarif action failed: ${getErrorMessage(error2)}`
     );
   }
 }
diff --git a/package-lock.json b/package-lock.json
index a3e0e24f2..70692f17b 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -28,7 +28,7 @@
         "fast-deep-equal": "^3.1.3",
         "file-url": "^3.0.0",
         "follow-redirects": "^1.15.11",
-        "get-folder-size": "^2.0.1",
+        "get-folder-size": "^5.0.0",
         "js-yaml": "^4.1.0",
         "jsonschema": "1.4.1",
         "long": "^5.3.2",
@@ -6041,10 +6041,6 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
-    "node_modules/gar": {
-      "version": "1.0.4",
-      "license": "MIT"
-    },
     "node_modules/get-caller-file": {
       "version": "2.0.5",
       "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
@@ -6067,14 +6063,15 @@
       }
     },
     "node_modules/get-folder-size": {
-      "version": "2.0.1",
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/get-folder-size/-/get-folder-size-5.0.0.tgz",
+      "integrity": "sha512-+fgtvbL83tSDypEK+T411GDBQVQtxv+qtQgbV+HVa/TYubqDhNd5ghH/D6cOHY9iC5/88GtOZB7WI8PXy2A3bg==",
       "license": "MIT",
-      "dependencies": {
-        "gar": "^1.0.4",
-        "tiny-each-async": "2.0.3"
-      },
       "bin": {
-        "get-folder-size": "bin/get-folder-size"
+        "get-folder-size": "bin/get-folder-size.js"
+      },
+      "engines": {
+        "node": ">=18.11.0"
       }
     },
     "node_modules/get-intrinsic": {
@@ -8689,10 +8686,6 @@
         "node": ">=4"
       }
     },
-    "node_modules/tiny-each-async": {
-      "version": "2.0.3",
-      "license": "MIT"
-    },
     "node_modules/tinyglobby": {
       "version": "0.2.12",
      "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.12.tgz",
diff --git a/package.json b/package.json
index 06ad3fdf4..2308ac300 100644
--- a/package.json
+++ b/package.json
@@ -42,7 +42,7 @@
     "fast-deep-equal": "^3.1.3",
     "file-url": "^3.0.0",
     "follow-redirects": "^1.15.11",
-    "get-folder-size": "^2.0.1",
+    "get-folder-size": "^5.0.0",
     "js-yaml": "^4.1.0",
     "jsonschema": "1.4.1",
     "long": "^5.3.2",
diff --git a/src/util.ts b/src/util.ts
index 1467cc9c2..ef024ffde 100644
--- a/src/util.ts
+++ b/src/util.ts
@@ -1,7 +1,6 @@
 import * as fs from "fs";
 import * as os from "os";
 import * as path from "path";
-import { promisify } from "util";
 
 import * as core from "@actions/core";
 import * as exec from "@actions/exec/lib/exec";
@@ -831,7 +830,8 @@ export async function tryGetFolderBytes(
   quiet: boolean = false,
 ): Promise<number | undefined> {
   try {
-    return await promisify(getFolderSize)(cacheDir);
+    // tolerate some errors since we're only estimating the size
+    return await getFolderSize.loose(cacheDir);
   } catch (e) {
     if (!quiet || logger.isDebug()) {
       logger.warning(
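
A note on the hunks above: almost everything in the generated lib/*.js bundles is mechanical fallout from the dependency bump. get-folder-size v5 drops the gar and tiny-each-async dependencies (both removed from package-lock.json), and its promise-based implementation is inlined into each bundle as a top-level helper named core, so esbuild renumbers the existing bindings: js-yaml's schema alias core becomes core2, and every @actions/core alias shifts from coreN to coreN+1. The one behavioral change is in src/util.ts, where the v2 callback API wrapped in promisify() gives way to the v5 promise API; the diff is truncated above inside tryGetFolderBytes. A minimal sketch of the migrated helper, assuming the repository's Logger shape and a hypothetical warning message for the truncated catch block:

import getFolderSize from "get-folder-size";

// Assumed shape of the repository's logger; only the members used here.
interface Logger {
  warning(message: string): void;
  isDebug(): boolean;
}

export async function tryGetFolderBytes(
  cacheDir: string,
  logger: Logger,
  quiet: boolean = false,
): Promise<number | undefined> {
  try {
    // get-folder-size v2 exposed a callback API that needed promisify();
    // v5 is promise-based. The .loose variant tolerates per-item errors
    // (e.g. files deleted mid-scan), which is fine for a size estimate.
    return await getFolderSize.loose(cacheDir);
  } catch (e) {
    if (!quiet || logger.isDebug()) {
      // Hypothetical message; the real hunk is cut off above.
      logger.warning(`Failed to get folder size: ${e}`);
    }
    return undefined;
  }
}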
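
For reference, the v5 implementation inlined into the bundles (the node_modules/get-folder-size/index.js block added in lib/upload-sarif-action.js above) exposes three entry points: the default export collects per-item errors and returns them alongside the size, .loose tolerates them silently, and .strict throws on the first failure. A short usage sketch with a hypothetical target path:

import getFolderSize from "get-folder-size";

async function demo(): Promise<void> {
  const target = "/tmp/example-folder"; // hypothetical path

  // Default entry point: per-item lstat/readdir failures are collected and
  // returned instead of thrown; `ignore` skips paths matching a RegExp.
  const { size, errors } = await getFolderSize(target, {
    ignore: /node_modules/,
  });
  if (errors) {
    for (const err of errors) {
      console.warn(`skipped: ${err}`);
    }
  }
  console.log(`~${size} bytes`);

  // .loose: returns only the size and silently tolerates per-item errors.
  console.log(`loose: ${await getFolderSize.loose(target)} bytes`);

  // .strict: throws on the first failure; { bigint: true } also skips the
  // Number.MAX_SAFE_INTEGER clamp the default mode applies to huge folders.
  console.log(`strict: ${await getFolderSize.strict(target, { bigint: true })} bytes`);
}

void demo();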