Merge remote-tracking branch 'origin/releases/v3' into backport-v2.27.7-babb554ed

github-actions[bot]
2024-12-10 13:42:05 +00:00
5615 changed files with 323257 additions and 183852 deletions

1
node_modules/.bin/is-docker generated vendored

@@ -1 +0,0 @@
../is-docker/cli.js


@@ -1 +0,0 @@
../is-inside-container/cli.js

1098
node_modules/.package-lock.json generated vendored

File diff suppressed because it is too large


@@ -6,6 +6,20 @@ See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/ac
Note that GitHub will remove any cache entries that have not been accessed in over 7 days. There is no limit on the number of caches you can store, but the total size of all caches in a repository is limited to 10 GB. If you exceed this limit, GitHub will save your cache but will begin evicting caches until the total size is less than 10 GB.
## ⚠️ Important changes
The cache backend service has been rewritten from the ground up for improved performance and reliability. The [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) package now integrates with the new cache service (v2) APIs.
The new service will gradually roll out as of **February 1st, 2025**. The legacy service will also be sunset on the same date. Changes in this release are **fully backward compatible**.
**All previous versions of this package will be deprecated**. We recommend upgrading to version `4.0.0` as soon as possible before **February 1st, 2025**.
If you do not upgrade, all workflow runs using any of the deprecated [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) packages will fail.
Upgrading to the recommended version should not break or require any changes to your workflows beyond updating your `package.json` to version `4.0.0`.
Read more about the change & access the migration guide: [reference to the announcement](https://github.com/actions/toolkit/discussions/1890).
## Usage
This package is used by the v2+ versions of our first party cache action. You can find an example implementation in the cache repo [here](https://github.com/actions/cache).
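For orientation, here is a minimal restore sketch against this package's public `restoreCache` API, mirroring the usage shown in the diff context below; the paths and keys are illustrative only:

```ts
import * as cache from "@actions/cache";

async function restore(): Promise<void> {
  const paths = ["node_modules"];
  const key = "npm-foo-bar";                  // hypothetical primary key
  const restoreKeys = ["npm-foo-", "npm-"];   // prefix-matched fallback keys
  // Resolves to the matched key on a cache hit, or undefined on a miss.
  const cacheKey = await cache.restoreCache(paths, key, restoreKeys);
  console.log(cacheKey ?? "cache miss");
}
```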
@@ -47,5 +61,3 @@ const cacheKey = await cache.restoreCache(paths, key, restoreKeys)
A cache is downloaded in multiple segments of fixed size (now `128MB` to fail fast; previously `1GB` for a `32-bit` runner and `2GB` for a `64-bit` runner). Sometimes a segment download gets stuck, which causes the workflow job to hang and eventually fail. Version `v3.0.4` of the cache package introduces a segment download timeout, which allows a stuck segment download to be aborted so the job can proceed with a cache miss.
The default value of this timeout is 10 minutes (as of `v3.2.1`; it was 60 minutes in versions `v3.0.4` through `v3.2.0`, inclusive) and can be customized by setting an [environment variable](https://docs.github.com/en/actions/learn-github-actions/environment-variables) named `SEGMENT_DOWNLOAD_TIMEOUT_MINS` to the timeout value in minutes.
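A hedged sketch of customizing this timeout when calling the package directly from an action, assuming the variable is read from the process environment when the download starts:

```ts
import * as cache from "@actions/cache";

async function restoreWithShortTimeout(): Promise<void> {
  // Abort a stuck segment download after 5 minutes instead of the default 10.
  process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"] = "5";
  await cache.restoreCache(["node_modules"], "npm-foo-bar"); // hypothetical key
}
```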


@@ -15,8 +15,8 @@ export declare function isFeatureAvailable(): boolean;
* Restores cache from keys
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param downloadOptions cache download options
* @param enableCrossOsArchive an optional boolean enabling Windows runners to restore caches created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined


@@ -37,7 +37,10 @@ const core = __importStar(require("@actions/core"));
const path = __importStar(require("path"));
const utils = __importStar(require("./internal/cacheUtils"));
const cacheHttpClient = __importStar(require("./internal/cacheHttpClient"));
const cacheTwirpClient = __importStar(require("./internal/shared/cacheTwirpClient"));
const config_1 = require("./internal/config");
const tar_1 = require("./internal/tar");
const constants_1 = require("./internal/constants");
class ValidationError extends Error {
constructor(message) {
super(message);
@@ -81,15 +84,39 @@ exports.isFeatureAvailable = isFeatureAvailable;
* Restores cache from keys
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param downloadOptions cache download options
* @param enableCrossOsArchive an optional boolean enabling Windows runners to restore caches created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core.debug(`Cache service version: ${cacheServiceVersion}`);
checkPaths(paths);
switch (cacheServiceVersion) {
case 'v2':
return yield restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive);
case 'v1':
default:
return yield restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive);
}
});
}
exports.restoreCache = restoreCache;
/**
* Restores cache using the legacy Cache Service
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param options cache download options
* @param enableCrossOsArchive an optional boolean enabling Windows runners to restore caches created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
restoreKeys = restoreKeys || [];
const keys = [primaryKey, ...restoreKeys];
core.debug('Resolved Keys:');
@@ -151,7 +178,85 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch
return undefined;
});
}
exports.restoreCache = restoreCache;
/**
* Restores cache using Cache Service v2
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param downloadOptions cache download options
* @param enableCrossOsArchive an optional boolean enabling Windows runners to restore caches created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
// Override DownloadOptions to force the use of the Azure SDK
options = Object.assign(Object.assign({}, options), { useAzureSdk: true });
restoreKeys = restoreKeys || [];
const keys = [primaryKey, ...restoreKeys];
core.debug('Resolved Keys:');
core.debug(JSON.stringify(keys));
if (keys.length > 10) {
throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`);
}
for (const key of keys) {
checkKey(key);
}
let archivePath = '';
try {
const twirpClient = cacheTwirpClient.internalCacheTwirpClient();
const compressionMethod = yield utils.getCompressionMethod();
const request = {
key: primaryKey,
restoreKeys,
version: utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive)
};
const response = yield twirpClient.GetCacheEntryDownloadURL(request);
if (!response.ok) {
core.warning(`Cache not found for keys: ${keys.join(', ')}`);
return undefined;
}
core.info(`Cache hit for: ${request.key}`);
if (options === null || options === void 0 ? void 0 : options.lookupOnly) {
core.info('Lookup only - skipping download');
return response.matchedKey;
}
archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
core.debug(`Archive path: ${archivePath}`);
core.debug(`Starting download of archive to: ${archivePath}`);
yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options);
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
if (core.isDebug()) {
yield (0, tar_1.listTar)(archivePath, compressionMethod);
}
yield (0, tar_1.extractTar)(archivePath, compressionMethod);
core.info('Cache restored successfully');
return response.matchedKey;
}
catch (error) {
const typedError = error;
if (typedError.name === ValidationError.name) {
throw error;
}
else {
// Suppress all non-validation cache-related errors because caching should be optional
core.warning(`Failed to restore: ${error.message}`);
}
}
finally {
try {
if (archivePath) {
yield utils.unlinkFile(archivePath);
}
}
catch (error) {
core.debug(`Failed to delete archive: ${error}`);
}
}
return undefined;
});
}
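The `lookupOnly` branch above returns the matched key without downloading or extracting the archive. A minimal caller-side sketch, with hypothetical paths and keys:

```ts
import * as cache from "@actions/cache";

async function hasCacheHit(): Promise<boolean> {
  // With lookupOnly set, a hit returns the matched key but skips the download.
  const matched = await cache.restoreCache(
    ["node_modules"],
    "npm-foo-bar",
    ["npm-foo-"],
    { lookupOnly: true }
  );
  return matched !== undefined;
}
```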
/**
* Saves a list of files with the specified key
*
@@ -162,10 +267,33 @@ exports.restoreCache = restoreCache;
* @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
*/
function saveCache(paths, key, options, enableCrossOsArchive = false) {
var _a, _b, _c, _d, _e;
return __awaiter(this, void 0, void 0, function* () {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core.debug(`Cache service version: ${cacheServiceVersion}`);
checkPaths(paths);
checkKey(key);
switch (cacheServiceVersion) {
case 'v2':
return yield saveCacheV2(paths, key, options, enableCrossOsArchive);
case 'v1':
default:
return yield saveCacheV1(paths, key, options, enableCrossOsArchive);
}
});
}
exports.saveCache = saveCache;
/**
* Save cache using the legacy Cache Service
*
* @param paths a list of file paths to be cached
* @param key an explicit key for saving the cache
* @param options cache upload options
* @param enableCrossOsArchive an optional boolean enabling a cache saved on Windows to be restored on any platform
* @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
*/
function saveCacheV1(paths, key, options, enableCrossOsArchive = false) {
var _a, _b, _c, _d, _e;
return __awaiter(this, void 0, void 0, function* () {
const compressionMethod = yield utils.getCompressionMethod();
let cacheId = -1;
const cachePaths = yield utils.resolvePaths(paths);
@@ -186,7 +314,7 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
// For GHES, this check will take place in ReserveCache API with enterprise file size limit
if (archiveFileSize > fileSizeLimit && !utils.isGhes()) {
if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
}
core.debug('Reserving Cache');
@@ -205,7 +333,95 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`);
}
core.debug(`Saving Cache (ID: ${cacheId})`);
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
yield cacheHttpClient.saveCache(cacheId, archivePath, '', options);
}
catch (error) {
const typedError = error;
if (typedError.name === ValidationError.name) {
throw error;
}
else if (typedError.name === ReserveCacheError.name) {
core.info(`Failed to save: ${typedError.message}`);
}
else {
core.warning(`Failed to save: ${typedError.message}`);
}
}
finally {
// Try to delete the archive to save space
try {
yield utils.unlinkFile(archivePath);
}
catch (error) {
core.debug(`Failed to delete archive: ${error}`);
}
}
return cacheId;
});
}
/**
* Save cache using Cache Service v2
*
* @param paths a list of file paths to be cached
* @param key an explicit key for saving the cache
* @param options cache upload options
* @param enableCrossOsArchive an optional boolean enabling a cache saved on Windows to be restored on any platform
* @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
*/
function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
// Override UploadOptions to force the use of Azure
// ...options goes first because we want to override the default values
// set in UploadOptions with these specific figures
options = Object.assign(Object.assign({}, options), { uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8, useAzureSdk: true });
const compressionMethod = yield utils.getCompressionMethod();
const twirpClient = cacheTwirpClient.internalCacheTwirpClient();
let cacheId = -1;
const cachePaths = yield utils.resolvePaths(paths);
core.debug('Cache Paths:');
core.debug(`${JSON.stringify(cachePaths)}`);
if (cachePaths.length === 0) {
throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`);
}
const archiveFolder = yield utils.createTempDirectory();
const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
try {
yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod);
if (core.isDebug()) {
yield (0, tar_1.listTar)(archivePath, compressionMethod);
}
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
// For GHES, this check will take place in ReserveCache API with enterprise file size limit
if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
}
// Set the archive size in the options; it will be used to display the upload progress
options.archiveSizeBytes = archiveFileSize;
core.debug('Reserving Cache');
const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive);
const request = {
key,
version
};
const response = yield twirpClient.CreateCacheEntry(request);
if (!response.ok) {
throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`);
}
core.debug(`Attempting to upload cache located at: ${archivePath}`);
yield cacheHttpClient.saveCache(cacheId, archivePath, response.signedUploadUrl, options);
const finalizeRequest = {
key,
version,
sizeBytes: `${archiveFileSize}`
};
const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest);
core.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`);
if (!finalizeResponse.ok) {
throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`);
}
cacheId = parseInt(finalizeResponse.entryId);
}
catch (error) {
const typedError = error;
@@ -231,5 +447,4 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
return cacheId;
});
}
exports.saveCache = saveCache;
//# sourceMappingURL=cache.js.map
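From a caller's perspective the save path is unchanged whichever service version the dispatcher above selects. A small sketch against the public `saveCache` export; the paths and key are illustrative:

```ts
import * as cache from "@actions/cache";

async function save(): Promise<void> {
  // Resolves to the numeric cache id, or -1 when the save was skipped
  // (for example, another job already reserved the same key).
  const cacheId = await cache.saveCache(["node_modules"], "npm-foo-bar");
  console.log(`saveCache returned ${cacheId}`);
}
```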

File diff suppressed because one or more lines are too long


@@ -0,0 +1,158 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import type { JsonValue } from "@protobuf-ts/runtime";
import type { JsonReadOptions } from "@protobuf-ts/runtime";
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* A Timestamp represents a point in time independent of any time zone or local
* calendar, encoded as a count of seconds and fractions of seconds at
* nanosecond resolution. The count is relative to an epoch at UTC midnight on
* January 1, 1970, in the proleptic Gregorian calendar which extends the
* Gregorian calendar backwards to year one.
*
* All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
* second table is needed for interpretation, using a [24-hour linear
* smear](https://developers.google.com/time/smear).
*
* The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
* restricting to that range, we ensure that we can convert to and from [RFC
* 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
*
* # Examples
*
* Example 1: Compute Timestamp from POSIX `time()`.
*
* Timestamp timestamp;
* timestamp.set_seconds(time(NULL));
* timestamp.set_nanos(0);
*
* Example 2: Compute Timestamp from POSIX `gettimeofday()`.
*
* struct timeval tv;
* gettimeofday(&tv, NULL);
*
* Timestamp timestamp;
* timestamp.set_seconds(tv.tv_sec);
* timestamp.set_nanos(tv.tv_usec * 1000);
*
* Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
*
* FILETIME ft;
* GetSystemTimeAsFileTime(&ft);
* UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
*
* // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
* // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
* Timestamp timestamp;
* timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
* timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
*
* Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
*
* long millis = System.currentTimeMillis();
*
* Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
* .setNanos((int) ((millis % 1000) * 1000000)).build();
*
*
* Example 5: Compute Timestamp from Java `Instant.now()`.
*
* Instant now = Instant.now();
*
* Timestamp timestamp =
* Timestamp.newBuilder().setSeconds(now.getEpochSecond())
* .setNanos(now.getNano()).build();
*
*
* Example 6: Compute Timestamp from current time in Python.
*
* timestamp = Timestamp()
* timestamp.GetCurrentTime()
*
* # JSON Mapping
*
* In JSON format, the Timestamp type is encoded as a string in the
* [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
* format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
* where {year} is always expressed using four digits while {month}, {day},
* {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
* seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
* are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
* is required. A proto3 JSON serializer should always use UTC (as indicated by
* "Z") when printing the Timestamp type and a proto3 JSON parser should be
* able to accept both UTC and other timezones (as indicated by an offset).
*
* For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
* 01:30 UTC on January 15, 2017.
*
* In JavaScript, one can convert a Date object to this format using the
* standard
* [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
* method. In Python, a standard `datetime.datetime` object can be converted
* to this format using
* [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
* the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
* the Joda Time's [`ISODateTimeFormat.dateTime()`](
* http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
* ) to obtain a formatter capable of generating timestamps in this format.
*
*
*
* @generated from protobuf message google.protobuf.Timestamp
*/
export interface Timestamp {
/**
* Represents seconds of UTC time since Unix epoch
* 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
* 9999-12-31T23:59:59Z inclusive.
*
* @generated from protobuf field: int64 seconds = 1;
*/
seconds: string;
/**
* Non-negative fractions of a second at nanosecond resolution. Negative
* second values with fractions must still have non-negative nanos values
* that count forward in time. Must be from 0 to 999,999,999
* inclusive.
*
* @generated from protobuf field: int32 nanos = 2;
*/
nanos: number;
}
declare class Timestamp$Type extends MessageType<Timestamp> {
constructor();
/**
* Creates a new `Timestamp` for the current time.
*/
now(): Timestamp;
/**
* Converts a `Timestamp` to a JavaScript Date.
*/
toDate(message: Timestamp): Date;
/**
* Converts a JavaScript Date to a `Timestamp`.
*/
fromDate(date: Date): Timestamp;
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonWrite(message: Timestamp, options: JsonWriteOptions): JsonValue;
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Timestamp): Timestamp;
create(value?: PartialMessage<Timestamp>): Timestamp;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Timestamp): Timestamp;
internalBinaryWrite(message: Timestamp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.Timestamp
*/
export declare const Timestamp: Timestamp$Type;
export {};
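A short usage sketch against the declarations above; the import path is an assumption, since the generated file's path is not shown in this diff:

```ts
import { Timestamp } from "./timestamp";

// Per the interface above, `seconds` is a decimal string and `nanos` a number.
const ts = Timestamp.now();
const asDate = Timestamp.toDate(ts);          // back to a JavaScript Date
const roundTrip = Timestamp.fromDate(asDate);
console.log(ts.seconds, ts.nanos, roundTrip.seconds);
```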


@@ -0,0 +1,136 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Timestamp = void 0;
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
const runtime_6 = require("@protobuf-ts/runtime");
const runtime_7 = require("@protobuf-ts/runtime");
// @generated message type with reflection information, may provide speed optimized methods
class Timestamp$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.Timestamp", [
{ no: 1, name: "seconds", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 2, name: "nanos", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
]);
}
/**
* Creates a new `Timestamp` for the current time.
*/
now() {
const msg = this.create();
const ms = Date.now();
msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1000)).toString();
msg.nanos = (ms % 1000) * 1000000;
return msg;
}
/**
* Converts a `Timestamp` to a JavaScript Date.
*/
toDate(message) {
return new Date(runtime_6.PbLong.from(message.seconds).toNumber() * 1000 + Math.ceil(message.nanos / 1000000));
}
/**
* Converts a JavaScript Date to a `Timestamp`.
*/
fromDate(date) {
const msg = this.create();
const ms = date.getTime();
msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1000)).toString();
msg.nanos = (ms % 1000) * 1000000;
return msg;
}
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonWrite(message, options) {
let ms = runtime_6.PbLong.from(message.seconds).toNumber() * 1000;
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
if (message.nanos < 0)
throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative.");
let z = "Z";
if (message.nanos > 0) {
let nanosStr = (message.nanos + 1000000000).toString().substring(1);
if (nanosStr.substring(3) === "000000")
z = "." + nanosStr.substring(0, 3) + "Z";
else if (nanosStr.substring(6) === "000")
z = "." + nanosStr.substring(0, 6) + "Z";
else
z = "." + nanosStr + "Z";
}
return new Date(ms).toISOString().replace(".000Z", z);
}
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonRead(json, options, target) {
if (typeof json !== "string")
throw new Error("Unable to parse Timestamp from JSON " + (0, runtime_5.typeofJsonValue)(json) + ".");
let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/);
if (!matches)
throw new Error("Unable to parse Timestamp from JSON. Invalid format.");
let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z"));
if (Number.isNaN(ms))
throw new Error("Unable to parse Timestamp from JSON. Invalid value.");
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
if (!target)
target = this.create();
target.seconds = runtime_6.PbLong.from(ms / 1000).toString();
target.nanos = 0;
if (matches[7])
target.nanos = (parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1000000000);
return target;
}
create(value) {
const message = { seconds: "0", nanos: 0 };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 seconds */ 1:
message.seconds = reader.int64().toString();
break;
case /* int32 nanos */ 2:
message.nanos = reader.int32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* int64 seconds = 1; */
if (message.seconds !== "0")
writer.tag(1, runtime_1.WireType.Varint).int64(message.seconds);
/* int32 nanos = 2; */
if (message.nanos !== 0)
writer.tag(2, runtime_1.WireType.Varint).int32(message.nanos);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Timestamp
*/
exports.Timestamp = new Timestamp$Type();
//# sourceMappingURL=timestamp.js.map
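As a sketch of the RFC 3339 JSON mapping implemented above, assuming the standard protobuf-ts `toJson`/`fromJson` helpers inherited from `MessageType` (and an assumed import path):

```ts
import { Timestamp } from "./timestamp";

const ts = Timestamp.fromDate(new Date("2017-01-15T01:30:15.010Z"));
// internalJsonWrite yields an RFC 3339 string such as "2017-01-15T01:30:15.010Z".
const json = Timestamp.toJson(ts);
// internalJsonRead parses the string back into { seconds, nanos }.
const back = Timestamp.fromJson(json);
console.log(json, back.seconds, back.nanos);
```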

File diff suppressed because one or more lines are too long


@@ -0,0 +1,307 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import type { JsonValue } from "@protobuf-ts/runtime";
import type { JsonReadOptions } from "@protobuf-ts/runtime";
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* Wrapper message for `double`.
*
* The JSON representation for `DoubleValue` is JSON number.
*
* @generated from protobuf message google.protobuf.DoubleValue
*/
export interface DoubleValue {
/**
* The double value.
*
* @generated from protobuf field: double value = 1;
*/
value: number;
}
/**
* Wrapper message for `float`.
*
* The JSON representation for `FloatValue` is JSON number.
*
* @generated from protobuf message google.protobuf.FloatValue
*/
export interface FloatValue {
/**
* The float value.
*
* @generated from protobuf field: float value = 1;
*/
value: number;
}
/**
* Wrapper message for `int64`.
*
* The JSON representation for `Int64Value` is JSON string.
*
* @generated from protobuf message google.protobuf.Int64Value
*/
export interface Int64Value {
/**
* The int64 value.
*
* @generated from protobuf field: int64 value = 1;
*/
value: string;
}
/**
* Wrapper message for `uint64`.
*
* The JSON representation for `UInt64Value` is JSON string.
*
* @generated from protobuf message google.protobuf.UInt64Value
*/
export interface UInt64Value {
/**
* The uint64 value.
*
* @generated from protobuf field: uint64 value = 1;
*/
value: string;
}
/**
* Wrapper message for `int32`.
*
* The JSON representation for `Int32Value` is JSON number.
*
* @generated from protobuf message google.protobuf.Int32Value
*/
export interface Int32Value {
/**
* The int32 value.
*
* @generated from protobuf field: int32 value = 1;
*/
value: number;
}
/**
* Wrapper message for `uint32`.
*
* The JSON representation for `UInt32Value` is JSON number.
*
* @generated from protobuf message google.protobuf.UInt32Value
*/
export interface UInt32Value {
/**
* The uint32 value.
*
* @generated from protobuf field: uint32 value = 1;
*/
value: number;
}
/**
* Wrapper message for `bool`.
*
* The JSON representation for `BoolValue` is JSON `true` and `false`.
*
* @generated from protobuf message google.protobuf.BoolValue
*/
export interface BoolValue {
/**
* The bool value.
*
* @generated from protobuf field: bool value = 1;
*/
value: boolean;
}
/**
* Wrapper message for `string`.
*
* The JSON representation for `StringValue` is JSON string.
*
* @generated from protobuf message google.protobuf.StringValue
*/
export interface StringValue {
/**
* The string value.
*
* @generated from protobuf field: string value = 1;
*/
value: string;
}
/**
* Wrapper message for `bytes`.
*
* The JSON representation for `BytesValue` is JSON string.
*
* @generated from protobuf message google.protobuf.BytesValue
*/
export interface BytesValue {
/**
* The bytes value.
*
* @generated from protobuf field: bytes value = 1;
*/
value: Uint8Array;
}
declare class DoubleValue$Type extends MessageType<DoubleValue> {
constructor();
/**
* Encode `DoubleValue` to JSON number.
*/
internalJsonWrite(message: DoubleValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `DoubleValue` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: DoubleValue): DoubleValue;
create(value?: PartialMessage<DoubleValue>): DoubleValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DoubleValue): DoubleValue;
internalBinaryWrite(message: DoubleValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.DoubleValue
*/
export declare const DoubleValue: DoubleValue$Type;
declare class FloatValue$Type extends MessageType<FloatValue> {
constructor();
/**
* Encode `FloatValue` to JSON number.
*/
internalJsonWrite(message: FloatValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `FloatValue` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: FloatValue): FloatValue;
create(value?: PartialMessage<FloatValue>): FloatValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FloatValue): FloatValue;
internalBinaryWrite(message: FloatValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.FloatValue
*/
export declare const FloatValue: FloatValue$Type;
declare class Int64Value$Type extends MessageType<Int64Value> {
constructor();
/**
* Encode `Int64Value` to JSON string.
*/
internalJsonWrite(message: Int64Value, options: JsonWriteOptions): JsonValue;
/**
* Decode `Int64Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int64Value): Int64Value;
create(value?: PartialMessage<Int64Value>): Int64Value;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int64Value): Int64Value;
internalBinaryWrite(message: Int64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.Int64Value
*/
export declare const Int64Value: Int64Value$Type;
declare class UInt64Value$Type extends MessageType<UInt64Value> {
constructor();
/**
* Encode `UInt64Value` to JSON string.
*/
internalJsonWrite(message: UInt64Value, options: JsonWriteOptions): JsonValue;
/**
* Decode `UInt64Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt64Value): UInt64Value;
create(value?: PartialMessage<UInt64Value>): UInt64Value;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt64Value): UInt64Value;
internalBinaryWrite(message: UInt64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt64Value
*/
export declare const UInt64Value: UInt64Value$Type;
declare class Int32Value$Type extends MessageType<Int32Value> {
constructor();
/**
* Encode `Int32Value` to JSON string.
*/
internalJsonWrite(message: Int32Value, options: JsonWriteOptions): JsonValue;
/**
* Decode `Int32Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int32Value): Int32Value;
create(value?: PartialMessage<Int32Value>): Int32Value;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int32Value): Int32Value;
internalBinaryWrite(message: Int32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.Int32Value
*/
export declare const Int32Value: Int32Value$Type;
declare class UInt32Value$Type extends MessageType<UInt32Value> {
constructor();
/**
* Encode `UInt32Value` to JSON string.
*/
internalJsonWrite(message: UInt32Value, options: JsonWriteOptions): JsonValue;
/**
* Decode `UInt32Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt32Value): UInt32Value;
create(value?: PartialMessage<UInt32Value>): UInt32Value;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt32Value): UInt32Value;
internalBinaryWrite(message: UInt32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt32Value
*/
export declare const UInt32Value: UInt32Value$Type;
declare class BoolValue$Type extends MessageType<BoolValue> {
constructor();
/**
* Encode `BoolValue` to JSON bool.
*/
internalJsonWrite(message: BoolValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `BoolValue` from JSON bool.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BoolValue): BoolValue;
create(value?: PartialMessage<BoolValue>): BoolValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BoolValue): BoolValue;
internalBinaryWrite(message: BoolValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.BoolValue
*/
export declare const BoolValue: BoolValue$Type;
declare class StringValue$Type extends MessageType<StringValue> {
constructor();
/**
* Encode `StringValue` to JSON string.
*/
internalJsonWrite(message: StringValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `StringValue` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: StringValue): StringValue;
create(value?: PartialMessage<StringValue>): StringValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StringValue): StringValue;
internalBinaryWrite(message: StringValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.StringValue
*/
export declare const StringValue: StringValue$Type;
declare class BytesValue$Type extends MessageType<BytesValue> {
constructor();
/**
* Encode `BytesValue` to JSON string.
*/
internalJsonWrite(message: BytesValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `BytesValue` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BytesValue): BytesValue;
create(value?: PartialMessage<BytesValue>): BytesValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BytesValue): BytesValue;
internalBinaryWrite(message: BytesValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.BytesValue
*/
export declare const BytesValue: BytesValue$Type;
export {};
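A brief sketch of the wrappers declared above; the 64-bit types carry their values as decimal strings (the file is generated with `long_type_string`, as the implementation below notes), and the import path here is assumed:

```ts
import { Int64Value, StringValue } from "./wrappers";

// 64-bit values are kept as decimal strings to avoid precision loss.
const big = Int64Value.create({ value: "9007199254740993" });

// StringValue wraps a plain string; its JSON form is the bare JSON string.
const greeting = StringValue.create({ value: "hello" });

console.log(big.value, greeting.value);
```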


@@ -0,0 +1,614 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BytesValue = exports.StringValue = exports.BoolValue = exports.UInt32Value = exports.Int32Value = exports.UInt64Value = exports.Int64Value = exports.FloatValue = exports.DoubleValue = void 0;
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "google/protobuf/wrappers.proto" (package "google.protobuf", syntax proto3)
// tslint:disable
//
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//
// Wrappers for primitive (non-message) types. These types are useful
// for embedding primitives in the `google.protobuf.Any` type and for places
// where we need to distinguish between the absence of a primitive
// typed field and its default value.
//
// These wrappers have no meaningful use within repeated fields as they lack
// the ability to detect presence on individual elements.
// These wrappers have no meaningful use within a map or a oneof since
// individual entries of a map or fields of a oneof can already detect presence.
//
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
const runtime_6 = require("@protobuf-ts/runtime");
const runtime_7 = require("@protobuf-ts/runtime");
// @generated message type with reflection information, may provide speed optimized methods
class DoubleValue$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.DoubleValue", [
{ no: 1, name: "value", kind: "scalar", T: 1 /*ScalarType.DOUBLE*/ }
]);
}
/**
* Encode `DoubleValue` to JSON number.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(2, message.value, "value", false, true);
}
/**
* Decode `DoubleValue` from JSON number.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 1, undefined, "value");
return target;
}
create(value) {
const message = { value: 0 };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* double value */ 1:
message.value = reader.double();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* double value = 1; */
if (message.value !== 0)
writer.tag(1, runtime_3.WireType.Bit64).double(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.DoubleValue
*/
exports.DoubleValue = new DoubleValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FloatValue$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.FloatValue", [
{ no: 1, name: "value", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ }
]);
}
/**
* Encode `FloatValue` to JSON number.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(1, message.value, "value", false, true);
}
/**
* Decode `FloatValue` from JSON number.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 1, undefined, "value");
return target;
}
create(value) {
const message = { value: 0 };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* float value */ 1:
message.value = reader.float();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* float value = 1; */
if (message.value !== 0)
writer.tag(1, runtime_3.WireType.Bit32).float(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.FloatValue
*/
exports.FloatValue = new FloatValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Int64Value$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.Int64Value", [
{ no: 1, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
/**
* Encode `Int64Value` to JSON string.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(runtime_1.ScalarType.INT64, message.value, "value", false, true);
}
/**
* Decode `Int64Value` from JSON string.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.INT64, runtime_2.LongType.STRING, "value");
return target;
}
create(value) {
const message = { value: "0" };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 value */ 1:
message.value = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* int64 value = 1; */
if (message.value !== "0")
writer.tag(1, runtime_3.WireType.Varint).int64(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Int64Value
*/
exports.Int64Value = new Int64Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class UInt64Value$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.UInt64Value", [
{ no: 1, name: "value", kind: "scalar", T: 4 /*ScalarType.UINT64*/ }
]);
}
/**
* Encode `UInt64Value` to JSON string.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(runtime_1.ScalarType.UINT64, message.value, "value", false, true);
}
/**
* Decode `UInt64Value` from JSON string.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.UINT64, runtime_2.LongType.STRING, "value");
return target;
}
create(value) {
const message = { value: "0" };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* uint64 value */ 1:
message.value = reader.uint64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* uint64 value = 1; */
if (message.value !== "0")
writer.tag(1, runtime_3.WireType.Varint).uint64(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt64Value
*/
exports.UInt64Value = new UInt64Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Int32Value$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.Int32Value", [
{ no: 1, name: "value", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
]);
}
/**
* Encode `Int32Value` to JSON string.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(5, message.value, "value", false, true);
}
/**
* Decode `Int32Value` from JSON string.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 5, undefined, "value");
return target;
}
create(value) {
const message = { value: 0 };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int32 value */ 1:
message.value = reader.int32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* int32 value = 1; */
if (message.value !== 0)
writer.tag(1, runtime_3.WireType.Varint).int32(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Int32Value
*/
exports.Int32Value = new Int32Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class UInt32Value$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.UInt32Value", [
{ no: 1, name: "value", kind: "scalar", T: 13 /*ScalarType.UINT32*/ }
]);
}
/**
* Encode `UInt32Value` to JSON string.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(13, message.value, "value", false, true);
}
/**
* Decode `UInt32Value` from JSON string.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 13, undefined, "value");
return target;
}
create(value) {
const message = { value: 0 };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* uint32 value */ 1:
message.value = reader.uint32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* uint32 value = 1; */
if (message.value !== 0)
writer.tag(1, runtime_3.WireType.Varint).uint32(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt32Value
*/
exports.UInt32Value = new UInt32Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class BoolValue$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.BoolValue", [
{ no: 1, name: "value", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
]);
}
/**
* Encode `BoolValue` to JSON bool.
*/
internalJsonWrite(message, options) {
return message.value;
}
/**
* Decode `BoolValue` from JSON bool.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 8, undefined, "value");
return target;
}
create(value) {
const message = { value: false };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool value */ 1:
message.value = reader.bool();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool value = 1; */
if (message.value !== false)
writer.tag(1, runtime_3.WireType.Varint).bool(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.BoolValue
*/
exports.BoolValue = new BoolValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class StringValue$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.StringValue", [
{ no: 1, name: "value", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
/**
* Encode `StringValue` to JSON string.
*/
internalJsonWrite(message, options) {
return message.value;
}
/**
* Decode `StringValue` from JSON string.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 9, undefined, "value");
return target;
}
create(value) {
const message = { value: "" };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string value */ 1:
message.value = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* string value = 1; */
if (message.value !== "")
writer.tag(1, runtime_3.WireType.LengthDelimited).string(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.StringValue
*/
exports.StringValue = new StringValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class BytesValue$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.BytesValue", [
{ no: 1, name: "value", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
]);
}
/**
* Encode `BytesValue` to JSON string.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(12, message.value, "value", false, true);
}
/**
* Decode `BytesValue` from JSON string.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 12, undefined, "value");
return target;
}
create(value) {
const message = { value: new Uint8Array(0) };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bytes value */ 1:
message.value = reader.bytes();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bytes value = 1; */
if (message.value.length)
writer.tag(1, runtime_3.WireType.LengthDelimited).bytes(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.BytesValue
*/
exports.BytesValue = new BytesValue$Type();
//# sourceMappingURL=wrappers.js.map
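
The three wrapper types exported above (BoolValue, StringValue, BytesValue) are ordinary protobuf-ts MessageType instances, so the standard create/toBinary/fromBinary/toJson helpers from @protobuf-ts/runtime apply to them. A minimal round-trip sketch; the relative import path is an assumption for illustration:

import { StringValue, BytesValue } from "./wrappers";

// create() applies the defaults defined above (empty string / empty Uint8Array)
// and merges the partial input.
const msg = StringValue.create({ value: "hello" });

// Binary round trip through the internalBinaryWrite/internalBinaryRead methods shown above.
const wire = StringValue.toBinary(msg);
const back = StringValue.fromBinary(wire);
console.log(back.value); // "hello"

// Per the proto3 JSON mapping, a StringValue serializes to the bare string
// and a BytesValue to its base64 encoding.
console.log(StringValue.toJson(msg)); // "hello"
console.log(BytesValue.toJson(BytesValue.create({ value: new Uint8Array([1, 2, 3]) }))); // "AQID"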

File diff suppressed because one or more lines are too long


@@ -0,0 +1,378 @@
import { ServiceType } from "@protobuf-ts/runtime-rpc";
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { CacheEntry } from "../../entities/v1/cacheentry";
import { CacheMetadata } from "../../entities/v1/cachemetadata";
/**
* @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
*/
export interface CreateCacheEntryRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 3;
*/
version: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
*/
export interface CreateCacheEntryResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* SAS URL to upload the cache archive
*
* @generated from protobuf field: string signed_upload_url = 2;
*/
signedUploadUrl: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
*/
export interface FinalizeCacheEntryUploadRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Size of the cache archive in Bytes
*
* @generated from protobuf field: int64 size_bytes = 3;
*/
sizeBytes: string;
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 4;
*/
version: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
*/
export interface FinalizeCacheEntryUploadResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* Cache entry database ID
*
* @generated from protobuf field: int64 entry_id = 2;
*/
entryId: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
*/
export interface GetCacheEntryDownloadURLRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Restore keys used for prefix searching
*
* @generated from protobuf field: repeated string restore_keys = 3;
*/
restoreKeys: string[];
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 4;
*/
version: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
*/
export interface GetCacheEntryDownloadURLResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* SAS URL to download the cache archive
*
* @generated from protobuf field: string signed_download_url = 2;
*/
signedDownloadUrl: string;
/**
* Key or restore key that matches the lookup
*
* @generated from protobuf field: string matched_key = 3;
*/
matchedKey: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest
*/
export interface DeleteCacheEntryRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse
*/
export interface DeleteCacheEntryResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* Cache entry database ID
*
* @generated from protobuf field: int64 entry_id = 2;
*/
entryId: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesRequest
*/
export interface ListCacheEntriesRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Restore keys used for prefix searching
*
* @generated from protobuf field: repeated string restore_keys = 3;
*/
restoreKeys: string[];
}
/**
* @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesResponse
*/
export interface ListCacheEntriesResponse {
/**
* Cache entries in the defined scope
*
* @generated from protobuf field: repeated github.actions.results.entities.v1.CacheEntry entries = 1;
*/
entries: CacheEntry[];
}
/**
* @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryRequest
*/
export interface LookupCacheEntryRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Restore keys used for prefix searching
*
* @generated from protobuf field: repeated string restore_keys = 3;
*/
restoreKeys: string[];
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 4;
*/
version: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryResponse
*/
export interface LookupCacheEntryResponse {
/**
* Indicates whether the cache entry exists or not
*
* @generated from protobuf field: bool exists = 1;
*/
exists: boolean;
/**
* Matched cache entry metadata
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheEntry entry = 2;
*/
entry?: CacheEntry;
}
declare class CreateCacheEntryRequest$Type extends MessageType<CreateCacheEntryRequest> {
constructor();
create(value?: PartialMessage<CreateCacheEntryRequest>): CreateCacheEntryRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryRequest): CreateCacheEntryRequest;
internalBinaryWrite(message: CreateCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
*/
export declare const CreateCacheEntryRequest: CreateCacheEntryRequest$Type;
declare class CreateCacheEntryResponse$Type extends MessageType<CreateCacheEntryResponse> {
constructor();
create(value?: PartialMessage<CreateCacheEntryResponse>): CreateCacheEntryResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryResponse): CreateCacheEntryResponse;
internalBinaryWrite(message: CreateCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
*/
export declare const CreateCacheEntryResponse: CreateCacheEntryResponse$Type;
declare class FinalizeCacheEntryUploadRequest$Type extends MessageType<FinalizeCacheEntryUploadRequest> {
constructor();
create(value?: PartialMessage<FinalizeCacheEntryUploadRequest>): FinalizeCacheEntryUploadRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadRequest): FinalizeCacheEntryUploadRequest;
internalBinaryWrite(message: FinalizeCacheEntryUploadRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
*/
export declare const FinalizeCacheEntryUploadRequest: FinalizeCacheEntryUploadRequest$Type;
declare class FinalizeCacheEntryUploadResponse$Type extends MessageType<FinalizeCacheEntryUploadResponse> {
constructor();
create(value?: PartialMessage<FinalizeCacheEntryUploadResponse>): FinalizeCacheEntryUploadResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadResponse): FinalizeCacheEntryUploadResponse;
internalBinaryWrite(message: FinalizeCacheEntryUploadResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
*/
export declare const FinalizeCacheEntryUploadResponse: FinalizeCacheEntryUploadResponse$Type;
declare class GetCacheEntryDownloadURLRequest$Type extends MessageType<GetCacheEntryDownloadURLRequest> {
constructor();
create(value?: PartialMessage<GetCacheEntryDownloadURLRequest>): GetCacheEntryDownloadURLRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLRequest): GetCacheEntryDownloadURLRequest;
internalBinaryWrite(message: GetCacheEntryDownloadURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
*/
export declare const GetCacheEntryDownloadURLRequest: GetCacheEntryDownloadURLRequest$Type;
declare class GetCacheEntryDownloadURLResponse$Type extends MessageType<GetCacheEntryDownloadURLResponse> {
constructor();
create(value?: PartialMessage<GetCacheEntryDownloadURLResponse>): GetCacheEntryDownloadURLResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLResponse): GetCacheEntryDownloadURLResponse;
internalBinaryWrite(message: GetCacheEntryDownloadURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
*/
export declare const GetCacheEntryDownloadURLResponse: GetCacheEntryDownloadURLResponse$Type;
declare class DeleteCacheEntryRequest$Type extends MessageType<DeleteCacheEntryRequest> {
constructor();
create(value?: PartialMessage<DeleteCacheEntryRequest>): DeleteCacheEntryRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteCacheEntryRequest): DeleteCacheEntryRequest;
internalBinaryWrite(message: DeleteCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest
*/
export declare const DeleteCacheEntryRequest: DeleteCacheEntryRequest$Type;
declare class DeleteCacheEntryResponse$Type extends MessageType<DeleteCacheEntryResponse> {
constructor();
create(value?: PartialMessage<DeleteCacheEntryResponse>): DeleteCacheEntryResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteCacheEntryResponse): DeleteCacheEntryResponse;
internalBinaryWrite(message: DeleteCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse
*/
export declare const DeleteCacheEntryResponse: DeleteCacheEntryResponse$Type;
declare class ListCacheEntriesRequest$Type extends MessageType<ListCacheEntriesRequest> {
constructor();
create(value?: PartialMessage<ListCacheEntriesRequest>): ListCacheEntriesRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesRequest): ListCacheEntriesRequest;
internalBinaryWrite(message: ListCacheEntriesRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesRequest
*/
export declare const ListCacheEntriesRequest: ListCacheEntriesRequest$Type;
declare class ListCacheEntriesResponse$Type extends MessageType<ListCacheEntriesResponse> {
constructor();
create(value?: PartialMessage<ListCacheEntriesResponse>): ListCacheEntriesResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesResponse): ListCacheEntriesResponse;
internalBinaryWrite(message: ListCacheEntriesResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse
*/
export declare const ListCacheEntriesResponse: ListCacheEntriesResponse$Type;
declare class LookupCacheEntryRequest$Type extends MessageType<LookupCacheEntryRequest> {
constructor();
create(value?: PartialMessage<LookupCacheEntryRequest>): LookupCacheEntryRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryRequest): LookupCacheEntryRequest;
internalBinaryWrite(message: LookupCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryRequest
*/
export declare const LookupCacheEntryRequest: LookupCacheEntryRequest$Type;
declare class LookupCacheEntryResponse$Type extends MessageType<LookupCacheEntryResponse> {
constructor();
create(value?: PartialMessage<LookupCacheEntryResponse>): LookupCacheEntryResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryResponse): LookupCacheEntryResponse;
internalBinaryWrite(message: LookupCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse
*/
export declare const LookupCacheEntryResponse: LookupCacheEntryResponse$Type;
/**
* @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
*/
export declare const CacheService: ServiceType;
export {};
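
Because these are plain interfaces, request messages can be written as object literals; only metadata is optional, and int64 fields surface as strings since the code was generated with long_type_string. A small sketch with placeholder key and version values (the import path is assumed):

import type { GetCacheEntryDownloadURLRequest, FinalizeCacheEntryUploadRequest } from "./cache";

// Restore lookup: the primary key, prefix restore keys, and the version hash
// that scopes the entry to the cached paths, compression tool and runner OS.
const lookup: GetCacheEntryDownloadURLRequest = {
    key: "node-deps-linux-4f2a",       // placeholder key
    restoreKeys: ["node-deps-linux-"], // prefix fallbacks
    version: "placeholder-version-hash",
};

// Upload finalization: size_bytes is an int64, so it is carried as a string.
const finalize: FinalizeCacheEntryUploadRequest = {
    key: "node-deps-linux-4f2a",
    sizeBytes: "1048576",
    version: "placeholder-version-hash",
};

console.log(JSON.stringify(lookup), JSON.stringify(finalize));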


@@ -0,0 +1,730 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CacheService = exports.LookupCacheEntryResponse = exports.LookupCacheEntryRequest = exports.ListCacheEntriesResponse = exports.ListCacheEntriesRequest = exports.DeleteCacheEntryResponse = exports.DeleteCacheEntryRequest = exports.GetCacheEntryDownloadURLResponse = exports.GetCacheEntryDownloadURLRequest = exports.FinalizeCacheEntryUploadResponse = exports.FinalizeCacheEntryUploadRequest = exports.CreateCacheEntryResponse = exports.CreateCacheEntryRequest = void 0;
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "results/api/v1/cache.proto" (package "github.actions.results.api.v1", syntax proto3)
// tslint:disable
const runtime_rpc_1 = require("@protobuf-ts/runtime-rpc");
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
const cacheentry_1 = require("../../entities/v1/cacheentry");
const cachemetadata_1 = require("../../entities/v1/cachemetadata");
// @generated message type with reflection information, may provide speed optimized methods
class CreateCacheEntryRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.CreateCacheEntryRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { key: "", version: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
case /* string version */ 3:
message.version = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
/* string version = 3; */
if (message.version !== "")
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
*/
exports.CreateCacheEntryRequest = new CreateCacheEntryRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class CreateCacheEntryResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.CreateCacheEntryResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { ok: false, signedUploadUrl: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* string signed_upload_url */ 2:
message.signedUploadUrl = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
/* string signed_upload_url = 2; */
if (message.signedUploadUrl !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
*/
exports.CreateCacheEntryResponse = new CreateCacheEntryResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeCacheEntryUploadRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { key: "", sizeBytes: "0", version: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
case /* int64 size_bytes */ 3:
message.sizeBytes = reader.int64().toString();
break;
case /* string version */ 4:
message.version = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
/* int64 size_bytes = 3; */
if (message.sizeBytes !== "0")
writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
/* string version = 4; */
if (message.version !== "")
writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
*/
exports.FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeCacheEntryUploadResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value) {
const message = { ok: false, entryId: "0" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* int64 entry_id */ 2:
message.entryId = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
/* int64 entry_id = 2; */
if (message.entryId !== "0")
writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
*/
exports.FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class GetCacheEntryDownloadURLRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { key: "", restoreKeys: [], version: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
case /* repeated string restore_keys */ 3:
message.restoreKeys.push(reader.string());
break;
case /* string version */ 4:
message.version = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
/* repeated string restore_keys = 3; */
for (let i = 0; i < message.restoreKeys.length; i++)
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]);
/* string version = 4; */
if (message.version !== "")
writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
*/
exports.GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class GetCacheEntryDownloadURLResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "matched_key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { ok: false, signedDownloadUrl: "", matchedKey: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* string signed_download_url */ 2:
message.signedDownloadUrl = reader.string();
break;
case /* string matched_key */ 3:
message.matchedKey = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
/* string signed_download_url = 2; */
if (message.signedDownloadUrl !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl);
/* string matched_key = 3; */
if (message.matchedKey !== "")
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.matchedKey);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
*/
exports.GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class DeleteCacheEntryRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.DeleteCacheEntryRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { key: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest
*/
exports.DeleteCacheEntryRequest = new DeleteCacheEntryRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class DeleteCacheEntryResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.DeleteCacheEntryResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value) {
const message = { ok: false, entryId: "0" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* int64 entry_id */ 2:
message.entryId = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
/* int64 entry_id = 2; */
if (message.entryId !== "0")
writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse
*/
exports.DeleteCacheEntryResponse = new DeleteCacheEntryResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class ListCacheEntriesRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.ListCacheEntriesRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { key: "", restoreKeys: [] };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
case /* repeated string restore_keys */ 3:
message.restoreKeys.push(reader.string());
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
/* repeated string restore_keys = 3; */
for (let i = 0; i < message.restoreKeys.length; i++)
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesRequest
*/
exports.ListCacheEntriesRequest = new ListCacheEntriesRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class ListCacheEntriesResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.ListCacheEntriesResponse", [
{ no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => cacheentry_1.CacheEntry }
]);
}
create(value) {
const message = { entries: [] };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* repeated github.actions.results.entities.v1.CacheEntry entries */ 1:
message.entries.push(cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options));
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* repeated github.actions.results.entities.v1.CacheEntry entries = 1; */
for (let i = 0; i < message.entries.length; i++)
cacheentry_1.CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse
*/
exports.ListCacheEntriesResponse = new ListCacheEntriesResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class LookupCacheEntryRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.LookupCacheEntryRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { key: "", restoreKeys: [], version: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
case /* repeated string restore_keys */ 3:
message.restoreKeys.push(reader.string());
break;
case /* string version */ 4:
message.version = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
/* repeated string restore_keys = 3; */
for (let i = 0; i < message.restoreKeys.length; i++)
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]);
/* string version = 4; */
if (message.version !== "")
writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryRequest
*/
exports.LookupCacheEntryRequest = new LookupCacheEntryRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class LookupCacheEntryResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.LookupCacheEntryResponse", [
{ no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "entry", kind: "message", T: () => cacheentry_1.CacheEntry }
]);
}
create(value) {
const message = { exists: false };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool exists */ 1:
message.exists = reader.bool();
break;
case /* github.actions.results.entities.v1.CacheEntry entry */ 2:
message.entry = cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options, message.entry);
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool exists = 1; */
if (message.exists !== false)
writer.tag(1, runtime_1.WireType.Varint).bool(message.exists);
/* github.actions.results.entities.v1.CacheEntry entry = 2; */
if (message.entry)
cacheentry_1.CacheEntry.internalBinaryWrite(message.entry, writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse
*/
exports.LookupCacheEntryResponse = new LookupCacheEntryResponse$Type();
/**
* @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
*/
exports.CacheService = new runtime_rpc_1.ServiceType("github.actions.results.api.v1.CacheService", [
{ name: "CreateCacheEntry", options: {}, I: exports.CreateCacheEntryRequest, O: exports.CreateCacheEntryResponse },
{ name: "FinalizeCacheEntryUpload", options: {}, I: exports.FinalizeCacheEntryUploadRequest, O: exports.FinalizeCacheEntryUploadResponse },
{ name: "GetCacheEntryDownloadURL", options: {}, I: exports.GetCacheEntryDownloadURLRequest, O: exports.GetCacheEntryDownloadURLResponse },
{ name: "DeleteCacheEntry", options: {}, I: exports.DeleteCacheEntryRequest, O: exports.DeleteCacheEntryResponse },
{ name: "ListCacheEntries", options: {}, I: exports.ListCacheEntriesRequest, O: exports.ListCacheEntriesResponse },
{ name: "LookupCacheEntry", options: {}, I: exports.LookupCacheEntryRequest, O: exports.LookupCacheEntryResponse }
]);
//# sourceMappingURL=cache.js.map
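
Each exported constant above is a MessageType, so the generic protobuf-ts helpers sit alongside the generated internalBinaryRead/internalBinaryWrite methods, and CacheService exposes the method descriptors registered at the end of the file. A short sketch (import path assumed):

import { CreateCacheEntryRequest, CacheService } from "./cache";

// Defaults come from create() above: key and version start as empty strings.
const req = CreateCacheEntryRequest.create({ key: "npm-deps", version: "v1" });

// Round trip over the binary wire format.
const wire = CreateCacheEntryRequest.toBinary(req);
const back = CreateCacheEntryRequest.fromBinary(wire);
console.log(back.key, back.version); // "npm-deps" "v1"

// The ServiceType lists the six RPCs defined for the cache service.
console.log(CacheService.typeName);                 // "github.actions.results.api.v1.CacheService"
console.log(CacheService.methods.map(m => m.name)); // ["CreateCacheEntry", "FinalizeCacheEntryUpload", ...]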

File diff suppressed because one or more lines are too long


@@ -0,0 +1,53 @@
/// <reference types="node" />
import { TwirpContext, TwirpServer } from "twirp-ts";
import { CreateCacheEntryRequest, CreateCacheEntryResponse, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest, GetCacheEntryDownloadURLResponse, DeleteCacheEntryRequest, DeleteCacheEntryResponse, ListCacheEntriesRequest, ListCacheEntriesResponse, LookupCacheEntryRequest, LookupCacheEntryResponse } from "./cache";
interface Rpc {
request(service: string, method: string, contentType: "application/json" | "application/protobuf", data: object | Uint8Array): Promise<object | Uint8Array>;
}
export interface CacheServiceClient {
CreateCacheEntry(request: CreateCacheEntryRequest): Promise<CreateCacheEntryResponse>;
FinalizeCacheEntryUpload(request: FinalizeCacheEntryUploadRequest): Promise<FinalizeCacheEntryUploadResponse>;
GetCacheEntryDownloadURL(request: GetCacheEntryDownloadURLRequest): Promise<GetCacheEntryDownloadURLResponse>;
DeleteCacheEntry(request: DeleteCacheEntryRequest): Promise<DeleteCacheEntryResponse>;
ListCacheEntries(request: ListCacheEntriesRequest): Promise<ListCacheEntriesResponse>;
LookupCacheEntry(request: LookupCacheEntryRequest): Promise<LookupCacheEntryResponse>;
}
export declare class CacheServiceClientJSON implements CacheServiceClient {
private readonly rpc;
constructor(rpc: Rpc);
CreateCacheEntry(request: CreateCacheEntryRequest): Promise<CreateCacheEntryResponse>;
FinalizeCacheEntryUpload(request: FinalizeCacheEntryUploadRequest): Promise<FinalizeCacheEntryUploadResponse>;
GetCacheEntryDownloadURL(request: GetCacheEntryDownloadURLRequest): Promise<GetCacheEntryDownloadURLResponse>;
DeleteCacheEntry(request: DeleteCacheEntryRequest): Promise<DeleteCacheEntryResponse>;
ListCacheEntries(request: ListCacheEntriesRequest): Promise<ListCacheEntriesResponse>;
LookupCacheEntry(request: LookupCacheEntryRequest): Promise<LookupCacheEntryResponse>;
}
export declare class CacheServiceClientProtobuf implements CacheServiceClient {
private readonly rpc;
constructor(rpc: Rpc);
CreateCacheEntry(request: CreateCacheEntryRequest): Promise<CreateCacheEntryResponse>;
FinalizeCacheEntryUpload(request: FinalizeCacheEntryUploadRequest): Promise<FinalizeCacheEntryUploadResponse>;
GetCacheEntryDownloadURL(request: GetCacheEntryDownloadURLRequest): Promise<GetCacheEntryDownloadURLResponse>;
DeleteCacheEntry(request: DeleteCacheEntryRequest): Promise<DeleteCacheEntryResponse>;
ListCacheEntries(request: ListCacheEntriesRequest): Promise<ListCacheEntriesResponse>;
LookupCacheEntry(request: LookupCacheEntryRequest): Promise<LookupCacheEntryResponse>;
}
export interface CacheServiceTwirp<T extends TwirpContext = TwirpContext> {
CreateCacheEntry(ctx: T, request: CreateCacheEntryRequest): Promise<CreateCacheEntryResponse>;
FinalizeCacheEntryUpload(ctx: T, request: FinalizeCacheEntryUploadRequest): Promise<FinalizeCacheEntryUploadResponse>;
GetCacheEntryDownloadURL(ctx: T, request: GetCacheEntryDownloadURLRequest): Promise<GetCacheEntryDownloadURLResponse>;
DeleteCacheEntry(ctx: T, request: DeleteCacheEntryRequest): Promise<DeleteCacheEntryResponse>;
ListCacheEntries(ctx: T, request: ListCacheEntriesRequest): Promise<ListCacheEntriesResponse>;
LookupCacheEntry(ctx: T, request: LookupCacheEntryRequest): Promise<LookupCacheEntryResponse>;
}
export declare enum CacheServiceMethod {
CreateCacheEntry = "CreateCacheEntry",
FinalizeCacheEntryUpload = "FinalizeCacheEntryUpload",
GetCacheEntryDownloadURL = "GetCacheEntryDownloadURL",
DeleteCacheEntry = "DeleteCacheEntry",
ListCacheEntries = "ListCacheEntries",
LookupCacheEntry = "LookupCacheEntry"
}
export declare const CacheServiceMethodList: CacheServiceMethod[];
export declare function createCacheServiceServer<T extends TwirpContext = TwirpContext>(service: CacheServiceTwirp<T>): TwirpServer<CacheServiceTwirp<TwirpContext<import("http").IncomingMessage, import("http").ServerResponse<import("http").IncomingMessage>>>, T>;
export {};
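
The clients declared here only need an object satisfying the small Rpc interface. A rough transport sketch over Node's global fetch (Node 18+); the base URL, Twirp path scheme and the absent auth header are illustrative assumptions — the package itself wires an equivalent transport through @actions/http-client with the runner-provided service URL and token:

import { CacheServiceClientJSON } from "./cache.twirp";

const rpc = {
    async request(
        service: string,
        method: string,
        contentType: "application/json" | "application/protobuf",
        data: object | Uint8Array
    ): Promise<object | Uint8Array> {
        // Twirp convention: POST <base>/twirp/<package.Service>/<Method>
        const res = await fetch(`https://example.invalid/twirp/${service}/${method}`, {
            method: "POST",
            headers: { "Content-Type": contentType },
            body: contentType === "application/json"
                ? JSON.stringify(data)
                : (data as Uint8Array),
        });
        return contentType === "application/json"
            ? ((await res.json()) as object)
            : new Uint8Array(await res.arrayBuffer());
    },
};

const client = new CacheServiceClientJSON(rpc);
// client.GetCacheEntryDownloadURL({ key: "npm-deps", restoreKeys: [], version: "v1" })
//     .then(r => console.log(r.ok, r.matchedKey, r.signedDownloadUrl));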


@@ -0,0 +1,602 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.createCacheServiceServer = exports.CacheServiceMethodList = exports.CacheServiceMethod = exports.CacheServiceClientProtobuf = exports.CacheServiceClientJSON = void 0;
const twirp_ts_1 = require("twirp-ts");
const cache_1 = require("./cache");
class CacheServiceClientJSON {
constructor(rpc) {
this.rpc = rpc;
this.CreateCacheEntry.bind(this);
this.FinalizeCacheEntryUpload.bind(this);
this.GetCacheEntryDownloadURL.bind(this);
this.DeleteCacheEntry.bind(this);
this.ListCacheEntries.bind(this);
this.LookupCacheEntry.bind(this);
}
CreateCacheEntry(request) {
const data = cache_1.CreateCacheEntryRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/json", data);
return promise.then((data) => cache_1.CreateCacheEntryResponse.fromJson(data, {
ignoreUnknownFields: true,
}));
}
FinalizeCacheEntryUpload(request) {
const data = cache_1.FinalizeCacheEntryUploadRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/json", data);
return promise.then((data) => cache_1.FinalizeCacheEntryUploadResponse.fromJson(data, {
ignoreUnknownFields: true,
}));
}
GetCacheEntryDownloadURL(request) {
const data = cache_1.GetCacheEntryDownloadURLRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/json", data);
return promise.then((data) => cache_1.GetCacheEntryDownloadURLResponse.fromJson(data, {
ignoreUnknownFields: true,
}));
}
DeleteCacheEntry(request) {
const data = cache_1.DeleteCacheEntryRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "DeleteCacheEntry", "application/json", data);
return promise.then((data) => cache_1.DeleteCacheEntryResponse.fromJson(data, {
ignoreUnknownFields: true,
}));
}
ListCacheEntries(request) {
const data = cache_1.ListCacheEntriesRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "ListCacheEntries", "application/json", data);
return promise.then((data) => cache_1.ListCacheEntriesResponse.fromJson(data, {
ignoreUnknownFields: true,
}));
}
LookupCacheEntry(request) {
const data = cache_1.LookupCacheEntryRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "LookupCacheEntry", "application/json", data);
return promise.then((data) => cache_1.LookupCacheEntryResponse.fromJson(data, {
ignoreUnknownFields: true,
}));
}
}
exports.CacheServiceClientJSON = CacheServiceClientJSON;
class CacheServiceClientProtobuf {
constructor(rpc) {
this.rpc = rpc;
this.CreateCacheEntry.bind(this);
this.FinalizeCacheEntryUpload.bind(this);
this.GetCacheEntryDownloadURL.bind(this);
this.DeleteCacheEntry.bind(this);
this.ListCacheEntries.bind(this);
this.LookupCacheEntry.bind(this);
}
CreateCacheEntry(request) {
const data = cache_1.CreateCacheEntryRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/protobuf", data);
return promise.then((data) => cache_1.CreateCacheEntryResponse.fromBinary(data));
}
FinalizeCacheEntryUpload(request) {
const data = cache_1.FinalizeCacheEntryUploadRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/protobuf", data);
return promise.then((data) => cache_1.FinalizeCacheEntryUploadResponse.fromBinary(data));
}
GetCacheEntryDownloadURL(request) {
const data = cache_1.GetCacheEntryDownloadURLRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/protobuf", data);
return promise.then((data) => cache_1.GetCacheEntryDownloadURLResponse.fromBinary(data));
}
DeleteCacheEntry(request) {
const data = cache_1.DeleteCacheEntryRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "DeleteCacheEntry", "application/protobuf", data);
return promise.then((data) => cache_1.DeleteCacheEntryResponse.fromBinary(data));
}
ListCacheEntries(request) {
const data = cache_1.ListCacheEntriesRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "ListCacheEntries", "application/protobuf", data);
return promise.then((data) => cache_1.ListCacheEntriesResponse.fromBinary(data));
}
LookupCacheEntry(request) {
const data = cache_1.LookupCacheEntryRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "LookupCacheEntry", "application/protobuf", data);
return promise.then((data) => cache_1.LookupCacheEntryResponse.fromBinary(data));
}
}
exports.CacheServiceClientProtobuf = CacheServiceClientProtobuf;
var CacheServiceMethod;
(function (CacheServiceMethod) {
CacheServiceMethod["CreateCacheEntry"] = "CreateCacheEntry";
CacheServiceMethod["FinalizeCacheEntryUpload"] = "FinalizeCacheEntryUpload";
CacheServiceMethod["GetCacheEntryDownloadURL"] = "GetCacheEntryDownloadURL";
CacheServiceMethod["DeleteCacheEntry"] = "DeleteCacheEntry";
CacheServiceMethod["ListCacheEntries"] = "ListCacheEntries";
CacheServiceMethod["LookupCacheEntry"] = "LookupCacheEntry";
})(CacheServiceMethod || (exports.CacheServiceMethod = CacheServiceMethod = {}));
exports.CacheServiceMethodList = [
CacheServiceMethod.CreateCacheEntry,
CacheServiceMethod.FinalizeCacheEntryUpload,
CacheServiceMethod.GetCacheEntryDownloadURL,
CacheServiceMethod.DeleteCacheEntry,
CacheServiceMethod.ListCacheEntries,
CacheServiceMethod.LookupCacheEntry,
];
function createCacheServiceServer(service) {
return new twirp_ts_1.TwirpServer({
service,
packageName: "github.actions.results.api.v1",
serviceName: "CacheService",
methodList: exports.CacheServiceMethodList,
matchRoute: matchCacheServiceRoute,
});
}
exports.createCacheServiceServer = createCacheServiceServer;
function matchCacheServiceRoute(method, events) {
switch (method) {
case "CreateCacheEntry":
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
ctx = Object.assign(Object.assign({}, ctx), { methodName: "CreateCacheEntry" });
yield events.onMatch(ctx);
return handleCacheServiceCreateCacheEntryRequest(ctx, service, data, interceptors);
});
case "FinalizeCacheEntryUpload":
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
ctx = Object.assign(Object.assign({}, ctx), { methodName: "FinalizeCacheEntryUpload" });
yield events.onMatch(ctx);
return handleCacheServiceFinalizeCacheEntryUploadRequest(ctx, service, data, interceptors);
});
case "GetCacheEntryDownloadURL":
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
ctx = Object.assign(Object.assign({}, ctx), { methodName: "GetCacheEntryDownloadURL" });
yield events.onMatch(ctx);
return handleCacheServiceGetCacheEntryDownloadURLRequest(ctx, service, data, interceptors);
});
case "DeleteCacheEntry":
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
ctx = Object.assign(Object.assign({}, ctx), { methodName: "DeleteCacheEntry" });
yield events.onMatch(ctx);
return handleCacheServiceDeleteCacheEntryRequest(ctx, service, data, interceptors);
});
case "ListCacheEntries":
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
ctx = Object.assign(Object.assign({}, ctx), { methodName: "ListCacheEntries" });
yield events.onMatch(ctx);
return handleCacheServiceListCacheEntriesRequest(ctx, service, data, interceptors);
});
case "LookupCacheEntry":
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
ctx = Object.assign(Object.assign({}, ctx), { methodName: "LookupCacheEntry" });
yield events.onMatch(ctx);
return handleCacheServiceLookupCacheEntryRequest(ctx, service, data, interceptors);
});
default:
events.onNotFound();
const msg = `no handler found`;
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
}
}
function handleCacheServiceCreateCacheEntryRequest(ctx, service, data, interceptors) {
switch (ctx.contentType) {
case twirp_ts_1.TwirpContentType.JSON:
return handleCacheServiceCreateCacheEntryJSON(ctx, service, data, interceptors);
case twirp_ts_1.TwirpContentType.Protobuf:
return handleCacheServiceCreateCacheEntryProtobuf(ctx, service, data, interceptors);
default:
const msg = "unexpected Content-Type";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
}
}
function handleCacheServiceFinalizeCacheEntryUploadRequest(ctx, service, data, interceptors) {
switch (ctx.contentType) {
case twirp_ts_1.TwirpContentType.JSON:
return handleCacheServiceFinalizeCacheEntryUploadJSON(ctx, service, data, interceptors);
case twirp_ts_1.TwirpContentType.Protobuf:
return handleCacheServiceFinalizeCacheEntryUploadProtobuf(ctx, service, data, interceptors);
default:
const msg = "unexpected Content-Type";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
}
}
function handleCacheServiceGetCacheEntryDownloadURLRequest(ctx, service, data, interceptors) {
switch (ctx.contentType) {
case twirp_ts_1.TwirpContentType.JSON:
return handleCacheServiceGetCacheEntryDownloadURLJSON(ctx, service, data, interceptors);
case twirp_ts_1.TwirpContentType.Protobuf:
return handleCacheServiceGetCacheEntryDownloadURLProtobuf(ctx, service, data, interceptors);
default:
const msg = "unexpected Content-Type";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
}
}
function handleCacheServiceDeleteCacheEntryRequest(ctx, service, data, interceptors) {
switch (ctx.contentType) {
case twirp_ts_1.TwirpContentType.JSON:
return handleCacheServiceDeleteCacheEntryJSON(ctx, service, data, interceptors);
case twirp_ts_1.TwirpContentType.Protobuf:
return handleCacheServiceDeleteCacheEntryProtobuf(ctx, service, data, interceptors);
default:
const msg = "unexpected Content-Type";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
}
}
function handleCacheServiceListCacheEntriesRequest(ctx, service, data, interceptors) {
switch (ctx.contentType) {
case twirp_ts_1.TwirpContentType.JSON:
return handleCacheServiceListCacheEntriesJSON(ctx, service, data, interceptors);
case twirp_ts_1.TwirpContentType.Protobuf:
return handleCacheServiceListCacheEntriesProtobuf(ctx, service, data, interceptors);
default:
const msg = "unexpected Content-Type";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
}
}
function handleCacheServiceLookupCacheEntryRequest(ctx, service, data, interceptors) {
switch (ctx.contentType) {
case twirp_ts_1.TwirpContentType.JSON:
return handleCacheServiceLookupCacheEntryJSON(ctx, service, data, interceptors);
case twirp_ts_1.TwirpContentType.Protobuf:
return handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, interceptors);
default:
const msg = "unexpected Content-Type";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
}
}
function handleCacheServiceCreateCacheEntryJSON(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
const body = JSON.parse(data.toString() || "{}");
request = cache_1.CreateCacheEntryRequest.fromJson(body, {
ignoreUnknownFields: true,
});
}
catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.CreateCacheEntry(ctx, inputReq);
});
}
else {
response = yield service.CreateCacheEntry(ctx, request);
}
return JSON.stringify(cache_1.CreateCacheEntryResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}));
});
}
function handleCacheServiceFinalizeCacheEntryUploadJSON(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
const body = JSON.parse(data.toString() || "{}");
request = cache_1.FinalizeCacheEntryUploadRequest.fromJson(body, {
ignoreUnknownFields: true,
});
}
catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.FinalizeCacheEntryUpload(ctx, inputReq);
});
}
else {
response = yield service.FinalizeCacheEntryUpload(ctx, request);
}
return JSON.stringify(cache_1.FinalizeCacheEntryUploadResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}));
});
}
function handleCacheServiceGetCacheEntryDownloadURLJSON(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
const body = JSON.parse(data.toString() || "{}");
request = cache_1.GetCacheEntryDownloadURLRequest.fromJson(body, {
ignoreUnknownFields: true,
});
}
catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.GetCacheEntryDownloadURL(ctx, inputReq);
});
}
else {
response = yield service.GetCacheEntryDownloadURL(ctx, request);
}
return JSON.stringify(cache_1.GetCacheEntryDownloadURLResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}));
});
}
function handleCacheServiceDeleteCacheEntryJSON(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
const body = JSON.parse(data.toString() || "{}");
request = cache_1.DeleteCacheEntryRequest.fromJson(body, {
ignoreUnknownFields: true,
});
}
catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.DeleteCacheEntry(ctx, inputReq);
});
}
else {
response = yield service.DeleteCacheEntry(ctx, request);
}
return JSON.stringify(cache_1.DeleteCacheEntryResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}));
});
}
function handleCacheServiceListCacheEntriesJSON(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
const body = JSON.parse(data.toString() || "{}");
request = cache_1.ListCacheEntriesRequest.fromJson(body, {
ignoreUnknownFields: true,
});
}
catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.ListCacheEntries(ctx, inputReq);
});
}
else {
response = yield service.ListCacheEntries(ctx, request);
}
return JSON.stringify(cache_1.ListCacheEntriesResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}));
});
}
function handleCacheServiceLookupCacheEntryJSON(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
const body = JSON.parse(data.toString() || "{}");
request = cache_1.LookupCacheEntryRequest.fromJson(body, {
ignoreUnknownFields: true,
});
}
catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.LookupCacheEntry(ctx, inputReq);
});
}
else {
response = yield service.LookupCacheEntry(ctx, request);
}
return JSON.stringify(cache_1.LookupCacheEntryResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}));
});
}
function handleCacheServiceCreateCacheEntryProtobuf(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
request = cache_1.CreateCacheEntryRequest.fromBinary(data);
}
catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.CreateCacheEntry(ctx, inputReq);
});
}
else {
response = yield service.CreateCacheEntry(ctx, request);
}
return Buffer.from(cache_1.CreateCacheEntryResponse.toBinary(response));
});
}
function handleCacheServiceFinalizeCacheEntryUploadProtobuf(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
request = cache_1.FinalizeCacheEntryUploadRequest.fromBinary(data);
}
catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.FinalizeCacheEntryUpload(ctx, inputReq);
});
}
else {
response = yield service.FinalizeCacheEntryUpload(ctx, request);
}
return Buffer.from(cache_1.FinalizeCacheEntryUploadResponse.toBinary(response));
});
}
function handleCacheServiceGetCacheEntryDownloadURLProtobuf(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
request = cache_1.GetCacheEntryDownloadURLRequest.fromBinary(data);
}
catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.GetCacheEntryDownloadURL(ctx, inputReq);
});
}
else {
response = yield service.GetCacheEntryDownloadURL(ctx, request);
}
return Buffer.from(cache_1.GetCacheEntryDownloadURLResponse.toBinary(response));
});
}
function handleCacheServiceDeleteCacheEntryProtobuf(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
request = cache_1.DeleteCacheEntryRequest.fromBinary(data);
}
catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.DeleteCacheEntry(ctx, inputReq);
});
}
else {
response = yield service.DeleteCacheEntry(ctx, request);
}
return Buffer.from(cache_1.DeleteCacheEntryResponse.toBinary(response));
});
}
function handleCacheServiceListCacheEntriesProtobuf(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
request = cache_1.ListCacheEntriesRequest.fromBinary(data);
}
catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.ListCacheEntries(ctx, inputReq);
});
}
else {
response = yield service.ListCacheEntries(ctx, request);
}
return Buffer.from(cache_1.ListCacheEntriesResponse.toBinary(response));
});
}
function handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
request = cache_1.LookupCacheEntryRequest.fromBinary(data);
}
catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.LookupCacheEntry(ctx, inputReq);
});
}
else {
response = yield service.LookupCacheEntry(ctx, request);
}
return Buffer.from(cache_1.LookupCacheEntryResponse.toBinary(response));
});
}
//# sourceMappingURL=cache.twirp.js.map
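
The generated `CacheServiceClientJSON` / `CacheServiceClientProtobuf` classes above only handle (de)serialization; the HTTP transport is injected as the `rpc` constructor argument, an object exposing a single `request(service, method, contentType, data)` method. Below is a minimal sketch of such a transport for the JSON client, assuming a Twirp-style `<baseUrl>/twirp/<service>/<method>` route and a bearer token; the route layout, class name, and variable names are illustrative assumptions, not the package's actual wiring.

```ts
import { HttpClient } from '@actions/http-client'
import { BearerCredentialHandler } from '@actions/http-client/lib/auth'

// Shape of the transport the generated clients above call into.
interface Rpc {
  request(
    service: string,
    method: string,
    contentType: 'application/json' | 'application/protobuf',
    data: object | Uint8Array
  ): Promise<object | Uint8Array>
}

// JSON-only transport sketch: posts the serialized request body and resolves
// with the parsed response body, which the client then feeds into fromJson().
class JsonRpc implements Rpc {
  constructor(private readonly baseUrl: string, private readonly token: string) {}

  async request(
    service: string,
    method: string,
    contentType: 'application/json' | 'application/protobuf',
    data: object | Uint8Array
  ): Promise<object | Uint8Array> {
    if (contentType !== 'application/json') {
      throw new Error('this sketch only wires up the JSON client')
    }
    const client = new HttpClient('cache-twirp-example', [new BearerCredentialHandler(this.token)])
    const url = `${this.baseUrl}/twirp/${service}/${method}` // assumed Twirp route layout
    const res = await client.postJson<object>(url, data)
    if (res.statusCode !== 200 || res.result === null) {
      throw new Error(`${method} failed with status ${res.statusCode}`)
    }
    return res.result
  }
}

// Usage (endpoint and token are placeholders):
// const client = new CacheServiceClientJSON(new JsonRpc(resultsUrl, runtimeToken))
// const resp = await client.GetCacheEntryDownloadURL({ ... })
```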

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,71 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { Timestamp } from "../../../google/protobuf/timestamp";
/**
* @generated from protobuf message github.actions.results.entities.v1.CacheEntry
*/
export interface CacheEntry {
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 1;
*/
key: string;
/**
* SHA256 hex digest of the cache archive
*
* @generated from protobuf field: string hash = 2;
*/
hash: string;
/**
* Cache entry size in bytes
*
* @generated from protobuf field: int64 size_bytes = 3;
*/
sizeBytes: string;
/**
* Access scope
*
* @generated from protobuf field: string scope = 4;
*/
scope: string;
/**
* Version SHA256 hex digest
*
* @generated from protobuf field: string version = 5;
*/
version: string;
/**
* When the cache entry was created
*
* @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
*/
createdAt?: Timestamp;
/**
* When the cache entry was last accessed
*
* @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7;
*/
lastAccessedAt?: Timestamp;
/**
* When the cache entry is set to expire
*
* @generated from protobuf field: google.protobuf.Timestamp expires_at = 8;
*/
expiresAt?: Timestamp;
}
declare class CacheEntry$Type extends MessageType<CacheEntry> {
constructor();
create(value?: PartialMessage<CacheEntry>): CacheEntry;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheEntry): CacheEntry;
internalBinaryWrite(message: CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry
*/
export declare const CacheEntry: CacheEntry$Type;
export {};
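
Because `cacheentry.ts` is emitted by `protobuf-ts`, the exported `CacheEntry` constant is a `MessageType` with the usual `create` / `toBinary` / `fromBinary` helpers. A small round-trip sketch follows; the field values are placeholders, and note that `int64` fields such as `size_bytes` surface as strings while the timestamps are optional sub-messages.

```ts
import { CacheEntry } from './cacheentry'                      // assumed relative import
import { Timestamp } from '../../../google/protobuf/timestamp' // generated well-known type

const entry = CacheEntry.create({
  key: 'node-deps-linux-x64',
  hash: '0'.repeat(64),        // placeholder SHA-256 hex digest of the archive
  sizeBytes: '1048576',        // int64 -> string in the generated typings
  scope: 'refs/heads/main',
  version: '0'.repeat(64),     // placeholder version digest
  createdAt: Timestamp.now()
})

const bytes = CacheEntry.toBinary(entry)     // Uint8Array in protobuf wire format
const decoded = CacheEntry.fromBinary(bytes)
console.log(decoded.key, decoded.sizeBytes)  // 'node-deps-linux-x64' '1048576'
```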

View File

@@ -0,0 +1,106 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CacheEntry = void 0;
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
const timestamp_1 = require("../../../google/protobuf/timestamp");
// @generated message type with reflection information, may provide speed optimized methods
class CacheEntry$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.entities.v1.CacheEntry", [
{ no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
{ no: 7, name: "last_accessed_at", kind: "message", T: () => timestamp_1.Timestamp },
{ no: 8, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
]);
}
create(value) {
const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string key */ 1:
message.key = reader.string();
break;
case /* string hash */ 2:
message.hash = reader.string();
break;
case /* int64 size_bytes */ 3:
message.sizeBytes = reader.int64().toString();
break;
case /* string scope */ 4:
message.scope = reader.string();
break;
case /* string version */ 5:
message.version = reader.string();
break;
case /* google.protobuf.Timestamp created_at */ 6:
message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
break;
case /* google.protobuf.Timestamp last_accessed_at */ 7:
message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
break;
case /* google.protobuf.Timestamp expires_at */ 8:
message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* string key = 1; */
if (message.key !== "")
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key);
/* string hash = 2; */
if (message.hash !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash);
/* int64 size_bytes = 3; */
if (message.sizeBytes !== "0")
writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
/* string scope = 4; */
if (message.scope !== "")
writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope);
/* string version = 5; */
if (message.version !== "")
writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version);
/* google.protobuf.Timestamp created_at = 6; */
if (message.createdAt)
timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* google.protobuf.Timestamp last_accessed_at = 7; */
if (message.lastAccessedAt)
timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* google.protobuf.Timestamp expires_at = 8; */
if (message.expiresAt)
timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry
*/
exports.CacheEntry = new CacheEntry$Type();
//# sourceMappingURL=cacheentry.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"cacheentry.js","sourceRoot":"","sources":["../../../../../src/generated/results/entities/v1/cacheentry.ts"],"names":[],"mappings":";;;AAKA,kDAAgD;AAGhD,kDAA2D;AAE3D,kDAA8D;AAC9D,kDAAoD;AACpD,kDAAmD;AACnD,kEAA+D;AAsD/D,2FAA2F;AAC3F,MAAM,eAAgB,SAAQ,qBAAuB;IACjD;QACI,KAAK,CAAC,+CAA+C,EAAE;YACnD,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YAClE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YACnE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,oBAAoB,EAAE;YACxE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YACpE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YACtE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,qBAAS,EAAE;YAClE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,kBAAkB,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,qBAAS,EAAE;YACxE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,qBAAS,EAAE;SACrE,CAAC,CAAC;IACP,CAAC;IACD,MAAM,CAAC,KAAkC;QACrC,MAAM,OAAO,GAAG,EAAE,GAAG,EAAE,EAAE,EAAE,IAAI,EAAE,EAAE,EAAE,SAAS,EAAE,GAAG,EAAE,KAAK,EAAE,EAAE,EAAE,OAAO,EAAE,EAAE,EAAE,CAAC;QAC9E,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,sBAAY,EAAE,EAAE,UAAU,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5F,IAAI,KAAK,KAAK,SAAS;YACnB,IAAA,gCAAsB,EAAa,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC;QAC7D,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,kBAAkB,CAAC,MAAqB,EAAE,MAAc,EAAE,OAA0B,EAAE,MAAmB;QACrG,IAAI,OAAO,GAAG,MAAM,aAAN,MAAM,cAAN,MAAM,GAAI,IAAI,CAAC,MAAM,EAAE,EAAE,GAAG,GAAG,MAAM,CAAC,GAAG,GAAG,MAAM,CAAC;QACjE,OAAO,MAAM,CAAC,GAAG,GAAG,GAAG,EAAE;YACrB,IAAI,CAAC,OAAO,EAAE,QAAQ,CAAC,GAAG,MAAM,CAAC,GAAG,EAAE,CAAC;YACvC,QAAQ,OAAO,EAAE;gBACb,KAAK,gBAAgB,CAAC,CAAC;oBACnB,OAAO,CAAC,GAAG,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAC9B,MAAM;gBACV,KAAK,iBAAiB,CAAC,CAAC;oBACpB,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAC/B,MAAM;gBACV,KAAK,sBAAsB,CAAC,CAAC;oBACzB,OAAO,CAAC,SAAS,GAAG,MAAM,CAAC,KAAK,EAAE,CAAC,QAAQ,EAAE,CAAC;oBAC9C,MAAM;gBACV,KAAK,kBAAkB,CAAC,CAAC;oBACrB,OAAO,CAAC,KAAK,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAChC,MAAM;gBACV,KAAK,oBAAoB,CAAC,CAAC;oBACvB,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAClC,MAAM;gBACV,KAAK,0CAA0C,CAAC,CAAC;oBAC7C,OAAO,CAAC,SAAS,GAAG,qBAAS,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,EAAE,OAAO,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;oBACtG,MAAM;gBACV,KAAK,gDAAgD,CAAC,CAAC;oBACnD,OAAO,CAAC,cAAc,GAAG,qBAAS,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,EAAE,OAAO,EAAE,OAAO,CAAC,cAAc,CAAC,CAAC;oBAChH,MAAM;gBACV,KAAK,0CAA0C,CAAC,CAAC;oBAC7C,OAAO,CAAC,SAAS,GAAG,qBAAS,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,EAAE,OAAO,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;oBACtG,MAAM;gBACV;oBACI,IAAI,CAAC,GAAG,OAAO,CAAC,gBAAgB,CAAC;oBACjC,IAAI,CAAC,KAAK,OAAO;wBACb,MAAM,IAAI,UAAU,CAAC,KAAK,CAAC,iBAAiB,OAAO,eAAe,QAAQ,SAAS,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;oBACxG,IAAI,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC9B,IAAI,CAAC,KAAK,KAAK;wBACX,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;aACvG;SACJ;QACD,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,mBAAmB,CAAC,OAAmB,EAAE,MAAqB,EAAE,OAA2B;QACvF,qBAAqB;QACrB,IAAI,OAAO,CAAC,GAAG,KAAK,EAAE;YAClB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAChE,sBAAsB;QACtB,IAAI,OAAO,CAAC,IAAI,KAAK,EAAE;YACnB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC
,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACjE,2BAA2B;QAC3B,IAAI,OAAO,CAAC,SAAS,KAAK,GAAG;YACzB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;QAC5D,uBAAuB;QACvB,IAAI,OAAO,CAAC,KAAK,KAAK,EAAE;YACpB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QAClE,yBAAyB;QACzB,IAAI,OAAO,CAAC,OAAO,KAAK,EAAE;YACtB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QACpE,+CAA+C;QAC/C,IAAI,OAAO,CAAC,SAAS;YACjB,qBAAS,CAAC,mBAAmB,CAAC,OAAO,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;QACrH,qDAAqD;QACrD,IAAI,OAAO,CAAC,cAAc;YACtB,qBAAS,CAAC,mBAAmB,CAAC,OAAO,CAAC,cAAc,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;QAC1H,+CAA+C;QAC/C,IAAI,OAAO,CAAC,SAAS;YACjB,qBAAS,CAAC,mBAAmB,CAAC,OAAO,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;QACrH,IAAI,CAAC,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACnC,IAAI,CAAC,KAAK,KAAK;YACX,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;QAClF,OAAO,MAAM,CAAC;IAClB,CAAC;CACJ;AACD;;GAEG;AACU,QAAA,UAAU,GAAG,IAAI,eAAe,EAAE,CAAC"}

View File

@@ -0,0 +1,35 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { CacheScope } from "./cachescope";
/**
* @generated from protobuf message github.actions.results.entities.v1.CacheMetadata
*/
export interface CacheMetadata {
/**
* Backend repository id
*
* @generated from protobuf field: int64 repository_id = 1;
*/
repositoryId: string;
/**
* Scopes for the cache entry
*
* @generated from protobuf field: repeated github.actions.results.entities.v1.CacheScope scope = 2;
*/
scope: CacheScope[];
}
declare class CacheMetadata$Type extends MessageType<CacheMetadata> {
constructor();
create(value?: PartialMessage<CacheMetadata>): CacheMetadata;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheMetadata): CacheMetadata;
internalBinaryWrite(message: CacheMetadata, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheMetadata
*/
export declare const CacheMetadata: CacheMetadata$Type;
export {};

View File

@@ -0,0 +1,64 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CacheMetadata = void 0;
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
const cachescope_1 = require("./cachescope");
// @generated message type with reflection information, may provide speed optimized methods
class CacheMetadata$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.entities.v1.CacheMetadata", [
{ no: 1, name: "repository_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 2, name: "scope", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => cachescope_1.CacheScope }
]);
}
create(value) {
const message = { repositoryId: "0", scope: [] };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 repository_id */ 1:
message.repositoryId = reader.int64().toString();
break;
case /* repeated github.actions.results.entities.v1.CacheScope scope */ 2:
message.scope.push(cachescope_1.CacheScope.internalBinaryRead(reader, reader.uint32(), options));
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* int64 repository_id = 1; */
if (message.repositoryId !== "0")
writer.tag(1, runtime_1.WireType.Varint).int64(message.repositoryId);
/* repeated github.actions.results.entities.v1.CacheScope scope = 2; */
for (let i = 0; i < message.scope.length; i++)
cachescope_1.CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheMetadata
*/
exports.CacheMetadata = new CacheMetadata$Type();
//# sourceMappingURL=cachemetadata.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"cachemetadata.js","sourceRoot":"","sources":["../../../../../src/generated/results/entities/v1/cachemetadata.ts"],"names":[],"mappings":";;;AAKA,kDAAgD;AAGhD,kDAA2D;AAE3D,kDAA8D;AAC9D,kDAAoD;AACpD,kDAAmD;AACnD,6CAA0C;AAkB1C,2FAA2F;AAC3F,MAAM,kBAAmB,SAAQ,qBAA0B;IACvD;QACI,KAAK,CAAC,kDAAkD,EAAE;YACtD,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,eAAe,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,oBAAoB,EAAE;YAC3E,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,MAAM,EAAE,CAAC,CAAC,qBAAqB,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,uBAAU,EAAE;SAClG,CAAC,CAAC;IACP,CAAC;IACD,MAAM,CAAC,KAAqC;QACxC,MAAM,OAAO,GAAG,EAAE,YAAY,EAAE,GAAG,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC;QACjD,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,sBAAY,EAAE,EAAE,UAAU,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5F,IAAI,KAAK,KAAK,SAAS;YACnB,IAAA,gCAAsB,EAAgB,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC;QAChE,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,kBAAkB,CAAC,MAAqB,EAAE,MAAc,EAAE,OAA0B,EAAE,MAAsB;QACxG,IAAI,OAAO,GAAG,MAAM,aAAN,MAAM,cAAN,MAAM,GAAI,IAAI,CAAC,MAAM,EAAE,EAAE,GAAG,GAAG,MAAM,CAAC,GAAG,GAAG,MAAM,CAAC;QACjE,OAAO,MAAM,CAAC,GAAG,GAAG,GAAG,EAAE;YACrB,IAAI,CAAC,OAAO,EAAE,QAAQ,CAAC,GAAG,MAAM,CAAC,GAAG,EAAE,CAAC;YACvC,QAAQ,OAAO,EAAE;gBACb,KAAK,yBAAyB,CAAC,CAAC;oBAC5B,OAAO,CAAC,YAAY,GAAG,MAAM,CAAC,KAAK,EAAE,CAAC,QAAQ,EAAE,CAAC;oBACjD,MAAM;gBACV,KAAK,kEAAkE,CAAC,CAAC;oBACrE,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,uBAAU,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,EAAE,OAAO,CAAC,CAAC,CAAC;oBACpF,MAAM;gBACV;oBACI,IAAI,CAAC,GAAG,OAAO,CAAC,gBAAgB,CAAC;oBACjC,IAAI,CAAC,KAAK,OAAO;wBACb,MAAM,IAAI,UAAU,CAAC,KAAK,CAAC,iBAAiB,OAAO,eAAe,QAAQ,SAAS,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;oBACxG,IAAI,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC9B,IAAI,CAAC,KAAK,KAAK;wBACX,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;aACvG;SACJ;QACD,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,mBAAmB,CAAC,OAAsB,EAAE,MAAqB,EAAE,OAA2B;QAC1F,8BAA8B;QAC9B,IAAI,OAAO,CAAC,YAAY,KAAK,GAAG;YAC5B,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC/D,uEAAuE;QACvE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE;YACzC,uBAAU,CAAC,mBAAmB,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;QACrH,IAAI,CAAC,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACnC,IAAI,CAAC,KAAK,KAAK;YACX,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;QAClF,OAAO,MAAM,CAAC;IAClB,CAAC;CACJ;AACD;;GAEG;AACU,QAAA,aAAa,GAAG,IAAI,kBAAkB,EAAE,CAAC"}

View File

@@ -0,0 +1,34 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* @generated from protobuf message github.actions.results.entities.v1.CacheScope
*/
export interface CacheScope {
/**
* Determines the scope of the cache entry
*
* @generated from protobuf field: string scope = 1;
*/
scope: string;
/**
* None: 0 | Read: 1 | Write: 2 | All: (1|2)
*
* @generated from protobuf field: int64 permission = 2;
*/
permission: string;
}
declare class CacheScope$Type extends MessageType<CacheScope> {
constructor();
create(value?: PartialMessage<CacheScope>): CacheScope;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheScope): CacheScope;
internalBinaryWrite(message: CacheScope, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheScope
*/
export declare const CacheScope: CacheScope$Type;
export {};
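
`CacheMetadata` attaches one or more `CacheScope` entries to a repository, with `permission` being the bitmask documented above (Read = 1, Write = 2, All = 3). A hedged construction sketch, using placeholder scope strings and repository id:

```ts
import { CacheMetadata } from './cachemetadata' // assumed relative imports
import { CacheScope } from './cachescope'

const metadata = CacheMetadata.create({
  repositoryId: '123456', // int64 -> string
  scope: [
    CacheScope.create({ scope: 'refs/heads/main', permission: '3' }),     // read + write
    CacheScope.create({ scope: 'refs/heads/feature-x', permission: '1' }) // read only
  ]
})

console.log(CacheMetadata.toJsonString(metadata))
```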

View File

@@ -0,0 +1,63 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CacheScope = void 0;
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
// @generated message type with reflection information, may provide speed optimized methods
class CacheScope$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.entities.v1.CacheScope", [
{ no: 1, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "permission", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value) {
const message = { scope: "", permission: "0" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string scope */ 1:
message.scope = reader.string();
break;
case /* int64 permission */ 2:
message.permission = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* string scope = 1; */
if (message.scope !== "")
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.scope);
/* int64 permission = 2; */
if (message.permission !== "0")
writer.tag(2, runtime_1.WireType.Varint).int64(message.permission);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheScope
*/
exports.CacheScope = new CacheScope$Type();
//# sourceMappingURL=cachescope.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"cachescope.js","sourceRoot":"","sources":["../../../../../src/generated/results/entities/v1/cachescope.ts"],"names":[],"mappings":";;;AAKA,kDAAgD;AAGhD,kDAA2D;AAE3D,kDAA8D;AAC9D,kDAAoD;AACpD,kDAAmD;AAkBnD,2FAA2F;AAC3F,MAAM,eAAgB,SAAQ,qBAAuB;IACjD;QACI,KAAK,CAAC,+CAA+C,EAAE;YACnD,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YACpE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,oBAAoB,EAAE;SAC3E,CAAC,CAAC;IACP,CAAC;IACD,MAAM,CAAC,KAAkC;QACrC,MAAM,OAAO,GAAG,EAAE,KAAK,EAAE,EAAE,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC;QAC/C,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,sBAAY,EAAE,EAAE,UAAU,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5F,IAAI,KAAK,KAAK,SAAS;YACnB,IAAA,gCAAsB,EAAa,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC;QAC7D,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,kBAAkB,CAAC,MAAqB,EAAE,MAAc,EAAE,OAA0B,EAAE,MAAmB;QACrG,IAAI,OAAO,GAAG,MAAM,aAAN,MAAM,cAAN,MAAM,GAAI,IAAI,CAAC,MAAM,EAAE,EAAE,GAAG,GAAG,MAAM,CAAC,GAAG,GAAG,MAAM,CAAC;QACjE,OAAO,MAAM,CAAC,GAAG,GAAG,GAAG,EAAE;YACrB,IAAI,CAAC,OAAO,EAAE,QAAQ,CAAC,GAAG,MAAM,CAAC,GAAG,EAAE,CAAC;YACvC,QAAQ,OAAO,EAAE;gBACb,KAAK,kBAAkB,CAAC,CAAC;oBACrB,OAAO,CAAC,KAAK,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAChC,MAAM;gBACV,KAAK,sBAAsB,CAAC,CAAC;oBACzB,OAAO,CAAC,UAAU,GAAG,MAAM,CAAC,KAAK,EAAE,CAAC,QAAQ,EAAE,CAAC;oBAC/C,MAAM;gBACV;oBACI,IAAI,CAAC,GAAG,OAAO,CAAC,gBAAgB,CAAC;oBACjC,IAAI,CAAC,KAAK,OAAO;wBACb,MAAM,IAAI,UAAU,CAAC,KAAK,CAAC,iBAAiB,OAAO,eAAe,QAAQ,SAAS,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;oBACxG,IAAI,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC9B,IAAI,CAAC,KAAK,KAAK;wBACX,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;aACvG;SACJ;QACD,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,mBAAmB,CAAC,OAAmB,EAAE,MAAqB,EAAE,OAA2B;QACvF,uBAAuB;QACvB,IAAI,OAAO,CAAC,KAAK,KAAK,EAAE;YACpB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QAClE,2BAA2B;QAC3B,IAAI,OAAO,CAAC,UAAU,KAAK,GAAG;YAC1B,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;QAC7D,IAAI,CAAC,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACnC,IAAI,CAAC,KAAK,KAAK;YACX,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;QAClF,OAAO,MAAM,CAAC;IAClB,CAAC;CACJ;AACD;;GAEG;AACU,QAAA,UAAU,GAAG,IAAI,eAAe,EAAE,CAAC"}

View File

@@ -1,8 +1,6 @@
import { CompressionMethod } from './constants';
import { ArtifactCacheEntry, InternalCacheOptions, ReserveCacheResponse, ITypedResponseWithError } from './contracts';
import { DownloadOptions, UploadOptions } from '../options';
export declare function getCacheVersion(paths: string[], compressionMethod?: CompressionMethod, enableCrossOsArchive?: boolean): string;
export declare function getCacheEntry(keys: string[], paths: string[], options?: InternalCacheOptions): Promise<ArtifactCacheEntry | null>;
export declare function downloadCache(archiveLocation: string, archivePath: string, options?: DownloadOptions): Promise<void>;
export declare function reserveCache(key: string, paths: string[], options?: InternalCacheOptions): Promise<ITypedResponseWithError<ReserveCacheResponse>>;
export declare function saveCache(cacheId: number, archivePath: string, options?: UploadOptions): Promise<void>;
export declare function saveCache(cacheId: number, archivePath: string, signedUploadURL?: string, options?: UploadOptions): Promise<void>;

View File

@@ -32,20 +32,21 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0;
exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = void 0;
const core = __importStar(require("@actions/core"));
const http_client_1 = require("@actions/http-client");
const auth_1 = require("@actions/http-client/lib/auth");
const crypto = __importStar(require("crypto"));
const fs = __importStar(require("fs"));
const url_1 = require("url");
const utils = __importStar(require("./cacheUtils"));
const uploadUtils_1 = require("./uploadUtils");
const downloadUtils_1 = require("./downloadUtils");
const options_1 = require("../options");
const requestUtils_1 = require("./requestUtils");
const versionSalt = '1.0';
const config_1 = require("./config");
const user_agent_1 = require("./shared/user-agent");
function getCacheApiUrl(resource) {
const baseUrl = process.env['ACTIONS_CACHE_URL'] || '';
const baseUrl = (0, config_1.getCacheServiceURL)();
if (!baseUrl) {
throw new Error('Cache Service Url not found, unable to restore cache.');
}
@@ -67,29 +68,12 @@ function getRequestOptions() {
function createHttpClient() {
const token = process.env['ACTIONS_RUNTIME_TOKEN'] || '';
const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
return new http_client_1.HttpClient((0, user_agent_1.getUserAgentString)(), [bearerCredentialHandler], getRequestOptions());
}
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
// don't pass changes upstream
const components = paths.slice();
// Add compression method to cache version to restore
// compressed cache as per compression method
if (compressionMethod) {
components.push(compressionMethod);
}
// Only check for windows platforms if enableCrossOsArchive is false
if (process.platform === 'win32' && !enableCrossOsArchive) {
components.push('windows-only');
}
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt);
return crypto.createHash('sha256').update(components.join('|')).digest('hex');
}
exports.getCacheVersion = getCacheVersion;
function getCacheEntry(keys, paths, options) {
return __awaiter(this, void 0, void 0, function* () {
const httpClient = createHttpClient();
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
// Cache not found
@@ -160,7 +144,7 @@ exports.downloadCache = downloadCache;
function reserveCache(key, paths, options) {
return __awaiter(this, void 0, void 0, function* () {
const httpClient = createHttpClient();
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
const reserveCacheRequest = {
key,
version,
@@ -242,20 +226,30 @@ function commitCache(httpClient, cacheId, filesize) {
}));
});
}
function saveCache(cacheId, archivePath, options) {
function saveCache(cacheId, archivePath, signedUploadURL, options) {
return __awaiter(this, void 0, void 0, function* () {
const httpClient = createHttpClient();
core.debug('Upload cache');
yield uploadFile(httpClient, cacheId, archivePath, options);
// Commit Cache
core.debug('Committing cache');
const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) {
throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
// Use Azure storage SDK to upload caches directly to Azure
if (!signedUploadURL) {
throw new Error('Azure Storage SDK can only be used when a signed URL is provided.');
}
yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options);
}
else {
const httpClient = createHttpClient();
core.debug('Upload cache');
yield uploadFile(httpClient, cacheId, archivePath, options);
// Commit Cache
core.debug('Committing cache');
const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) {
throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
}
core.info('Cache saved successfully');
}
core.info('Cache saved successfully');
});
}
exports.saveCache = saveCache;
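
The reworked `saveCache` above now branches on the upload options: with `useAzureSdk` enabled it uploads the archive straight to blob storage through the signed URL, otherwise it keeps the chunked upload plus `commitCache` flow. A usage sketch against the internal signature shown in the `.d.ts` diff; the cache id, archive path, and signed URL are placeholders:

```ts
import { saveCache } from './cacheHttpClient' // assumed relative import

async function demo(): Promise<void> {
  // Legacy flow: chunked upload to the cache service, then commitCache.
  await saveCache(42, '/tmp/cache.tzst')

  // v2 flow: upload directly with the Azure Storage SDK. The diff above shows
  // this throws unless a signed upload URL is provided.
  await saveCache(
    42,
    '/tmp/cache.tzst',
    'https://example.blob.core.windows.net/cache/archive?sig=placeholder',
    { useAzureSdk: true }
  )
}

demo().catch(err => console.error(err))
```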

File diff suppressed because one or more lines are too long

View File

@@ -9,4 +9,5 @@ export declare function getCompressionMethod(): Promise<CompressionMethod>;
export declare function getCacheFileName(compressionMethod: CompressionMethod): string;
export declare function getGnuTarPathOnWindows(): Promise<string>;
export declare function assertDefined<T>(name: string, value?: T): T;
export declare function isGhes(): boolean;
export declare function getCacheVersion(paths: string[], compressionMethod?: CompressionMethod, enableCrossOsArchive?: boolean): string;
export declare function getRuntimeToken(): string;

View File

@@ -39,7 +39,7 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.isGhes = exports.assertDefined = exports.getGnuTarPathOnWindows = exports.getCacheFileName = exports.getCompressionMethod = exports.unlinkFile = exports.resolvePaths = exports.getArchiveFileSizeInBytes = exports.createTempDirectory = void 0;
exports.getRuntimeToken = exports.getCacheVersion = exports.assertDefined = exports.getGnuTarPathOnWindows = exports.getCacheFileName = exports.getCompressionMethod = exports.unlinkFile = exports.resolvePaths = exports.getArchiveFileSizeInBytes = exports.createTempDirectory = void 0;
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const glob = __importStar(require("@actions/glob"));
@@ -50,6 +50,7 @@ const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
const util = __importStar(require("util"));
const constants_1 = require("./constants");
const versionSalt = '1.0';
// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23
function createTempDirectory() {
return __awaiter(this, void 0, void 0, function* () {
@@ -187,12 +188,29 @@ function assertDefined(name, value) {
return value;
}
exports.assertDefined = assertDefined;
function isGhes() {
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
const hostname = ghUrl.hostname.trimEnd().toUpperCase();
const isGitHubHost = hostname === 'GITHUB.COM';
const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
return !isGitHubHost && !isGheHost;
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
// don't pass changes upstream
const components = paths.slice();
// Add compression method to cache version to restore
// compressed cache as per compression method
if (compressionMethod) {
components.push(compressionMethod);
}
// Only check for windows platforms if enableCrossOsArchive is false
if (process.platform === 'win32' && !enableCrossOsArchive) {
components.push('windows-only');
}
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt);
return crypto.createHash('sha256').update(components.join('|')).digest('hex');
}
exports.isGhes = isGhes;
exports.getCacheVersion = getCacheVersion;
function getRuntimeToken() {
const token = process.env['ACTIONS_RUNTIME_TOKEN'];
if (!token) {
throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable');
}
return token;
}
exports.getRuntimeToken = getRuntimeToken;
//# sourceMappingURL=cacheUtils.js.map
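
`getCacheVersion`, now exported from `cacheUtils` rather than `cacheHttpClient`, derives the `version` value sent as a query parameter: the path list, the optional compression method, a `windows-only` marker (on Windows without cross-OS archives), and the `'1.0'` salt are joined with `|` and hashed with SHA-256. A worked example of the same computation; the chosen path and compression method are assumptions:

```ts
import * as crypto from 'crypto'

// Equivalent of getCacheVersion(['node_modules'], 'zstd-without-long') on a
// non-Windows runner: no 'windows-only' component is appended.
const components = ['node_modules', 'zstd-without-long', '1.0']
const version = crypto
  .createHash('sha256')
  .update(components.join('|'))
  .digest('hex')

console.log(version) // 64-character hex digest sent as the version= parameter
```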

View File

@@ -1 +1 @@
{"version":3,"file":"cacheUtils.js","sourceRoot":"","sources":["../../src/internal/cacheUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,oDAAqC;AACrC,oDAAqC;AACrC,gDAAiC;AACjC,+CAAgC;AAChC,uCAAwB;AACxB,2CAA4B;AAC5B,+CAAgC;AAChC,2CAA4B;AAC5B,2CAIoB;AAEpB,8FAA8F;AAC9F,SAAsB,mBAAmB;;QACvC,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;QAE/C,IAAI,aAAa,GAAW,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;QAE5D,IAAI,CAAC,aAAa,EAAE;YAClB,IAAI,YAAoB,CAAA;YACxB,IAAI,UAAU,EAAE;gBACd,8CAA8C;gBAC9C,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,MAAM,CAAA;aACpD;iBAAM;gBACL,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;oBACjC,YAAY,GAAG,QAAQ,CAAA;iBACxB;qBAAM;oBACL,YAAY,GAAG,OAAO,CAAA;iBACvB;aACF;YACD,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,EAAE,MAAM,CAAC,CAAA;SAC3D;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;QAC1D,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;QACrB,OAAO,IAAI,CAAA;IACb,CAAC;CAAA;AAvBD,kDAuBC;AAED,SAAgB,yBAAyB,CAAC,QAAgB;IACxD,OAAO,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAA;AACnC,CAAC;AAFD,8DAEC;AAED,SAAsB,YAAY,CAAC,QAAkB;;;;QACnD,MAAM,KAAK,GAAa,EAAE,CAAA;QAC1B,MAAM,SAAS,GAAG,MAAA,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,mCAAI,OAAO,CAAC,GAAG,EAAE,CAAA;QAClE,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;YACrD,mBAAmB,EAAE,KAAK;SAC3B,CAAC,CAAA;;YAEF,KAAyB,eAAA,KAAA,cAAA,OAAO,CAAC,aAAa,EAAE,CAAA,IAAA,sDAAE;gBAAzB,cAAuB;gBAAvB,WAAuB;gBAArC,MAAM,IAAI,KAAA,CAAA;gBACnB,MAAM,YAAY,GAAG,IAAI;qBACtB,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC;qBACzB,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,CAAA;gBACjD,IAAI,CAAC,KAAK,CAAC,YAAY,YAAY,EAAE,CAAC,CAAA;gBACtC,4FAA4F;gBAC5F,IAAI,YAAY,KAAK,EAAE,EAAE;oBACvB,qEAAqE;oBACrE,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;iBAChB;qBAAM;oBACL,KAAK,CAAC,IAAI,CAAC,GAAG,YAAY,EAAE,CAAC,CAAA;iBAC9B;aACF;;;;;;;;;QAED,OAAO,KAAK,CAAA;;CACb;AAtBD,oCAsBC;AAED,SAAsB,UAAU,CAAC,QAAqB;;QACpD,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAA;IAC5C,CAAC;CAAA;AAFD,gCAEC;AAED,SAAe,UAAU,CACvB,GAAW,EACX,iBAA2B,EAAE;;QAE7B,IAAI,aAAa,GAAG,EAAE,CAAA;QACtB,cAAc,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;QAChC,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAA;QACzD,IAAI;YACF,MAAM,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,EAAE,EAAE,cAAc,EAAE;gBACxC,gBAAgB,EAAE,IAAI;gBACtB,MAAM,EAAE,IAAI;gBACZ,SAAS,EAAE;oBACT,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;oBACpE,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;iBACrE;aACF,CAAC,CAAA;SACH;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,CAAA;SACxB;QAED,aAAa,GAAG,aAAa,CAAC,IAAI,EAAE,CAAA;QACpC,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QACzB,OAAO,aAAa,CAAA;IACtB,CAAC;CAAA;AAED,0DAA0D;AAC1D,SAAsB,oBAAoB;;QACxC,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,MAAM,EAAE,CAAC,SAAS,CAAC,CAAC,CAAA;QAC3D,MAAM,OAAO,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QAC3C,IAAI,CAAC,KAAK,CAAC,iBAAiB,OAAO,EAAE,CAAC,CAAA;QAEtC,IAAI,aAAa,KAAK,EAAE,EAAE;YACxB,OAAO,6BAAiB,CAAC,IAAI,CAAA;SAC9B;aAAM;YACL,OAAO,6BAAiB,CAAC,eAAe,CAAA;SACzC;IACH,CAAC;CAAA;AAVD,oDAUC;AAED,SAAgB,gBAAgB,CAAC,iBAAoC;IACnE,OAAO,iBAAiB,KAAK,6BAAiB,CAAC,IAAI;QACjD,CAAC,CAAC,yBAAa,CAAC,IAAI;QACpB,CAAC,CAAC,yBAAa,CAAC,IAAI,CAAA;AACxB,CAAC;AAJD,4CAIC;AAED,SAAsB,sBAAsB;;QAC1C,IAAI,EAAE,CAAC,UAAU,CAAC,+BAAmB,CAAC,EAAE;YACtC,OAAO,+BAAmB,CAAA;SAC3B;QACD,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,KAAK,CAAC,CAAA;QAC7C,OAAO,aAAa,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAA;IAC/E,CAAC;CAAA;AAND,wDAMC;AAED
,SAAgB,aAAa,CAAI,IAAY,EAAE,KAAS;IACtD,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,KAAK,CAAC,YAAY,IAAI,0BAA0B,CAAC,CAAA;KACxD;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAND,sCAMC;AAED,SAAgB,MAAM;IACpB,MAAM,KAAK,GAAG,IAAI,GAAG,CACnB,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,oBAAoB,CACzD,CAAA;IAED,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,WAAW,EAAE,CAAA;IACvD,MAAM,YAAY,GAAG,QAAQ,KAAK,YAAY,CAAA;IAC9C,MAAM,SAAS,GACb,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,QAAQ,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAA;IAEtE,OAAO,CAAC,YAAY,IAAI,CAAC,SAAS,CAAA;AACpC,CAAC;AAXD,wBAWC"}
{"version":3,"file":"cacheUtils.js","sourceRoot":"","sources":["../../src/internal/cacheUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,oDAAqC;AACrC,oDAAqC;AACrC,gDAAiC;AACjC,+CAAgC;AAChC,uCAAwB;AACxB,2CAA4B;AAC5B,+CAAgC;AAChC,2CAA4B;AAC5B,2CAIoB;AAEpB,MAAM,WAAW,GAAG,KAAK,CAAA;AAEzB,8FAA8F;AAC9F,SAAsB,mBAAmB;;QACvC,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;QAE/C,IAAI,aAAa,GAAW,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;QAE5D,IAAI,CAAC,aAAa,EAAE;YAClB,IAAI,YAAoB,CAAA;YACxB,IAAI,UAAU,EAAE;gBACd,8CAA8C;gBAC9C,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,MAAM,CAAA;aACpD;iBAAM;gBACL,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;oBACjC,YAAY,GAAG,QAAQ,CAAA;iBACxB;qBAAM;oBACL,YAAY,GAAG,OAAO,CAAA;iBACvB;aACF;YACD,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,EAAE,MAAM,CAAC,CAAA;SAC3D;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;QAC1D,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;QACrB,OAAO,IAAI,CAAA;IACb,CAAC;CAAA;AAvBD,kDAuBC;AAED,SAAgB,yBAAyB,CAAC,QAAgB;IACxD,OAAO,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAA;AACnC,CAAC;AAFD,8DAEC;AAED,SAAsB,YAAY,CAAC,QAAkB;;;;QACnD,MAAM,KAAK,GAAa,EAAE,CAAA;QAC1B,MAAM,SAAS,GAAG,MAAA,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,mCAAI,OAAO,CAAC,GAAG,EAAE,CAAA;QAClE,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;YACrD,mBAAmB,EAAE,KAAK;SAC3B,CAAC,CAAA;;YAEF,KAAyB,eAAA,KAAA,cAAA,OAAO,CAAC,aAAa,EAAE,CAAA,IAAA,sDAAE;gBAAzB,cAAuB;gBAAvB,WAAuB;gBAArC,MAAM,IAAI,KAAA,CAAA;gBACnB,MAAM,YAAY,GAAG,IAAI;qBACtB,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC;qBACzB,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,CAAA;gBACjD,IAAI,CAAC,KAAK,CAAC,YAAY,YAAY,EAAE,CAAC,CAAA;gBACtC,4FAA4F;gBAC5F,IAAI,YAAY,KAAK,EAAE,EAAE;oBACvB,qEAAqE;oBACrE,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;iBAChB;qBAAM;oBACL,KAAK,CAAC,IAAI,CAAC,GAAG,YAAY,EAAE,CAAC,CAAA;iBAC9B;aACF;;;;;;;;;QAED,OAAO,KAAK,CAAA;;CACb;AAtBD,oCAsBC;AAED,SAAsB,UAAU,CAAC,QAAqB;;QACpD,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAA;IAC5C,CAAC;CAAA;AAFD,gCAEC;AAED,SAAe,UAAU,CACvB,GAAW,EACX,iBAA2B,EAAE;;QAE7B,IAAI,aAAa,GAAG,EAAE,CAAA;QACtB,cAAc,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;QAChC,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAA;QACzD,IAAI;YACF,MAAM,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,EAAE,EAAE,cAAc,EAAE;gBACxC,gBAAgB,EAAE,IAAI;gBACtB,MAAM,EAAE,IAAI;gBACZ,SAAS,EAAE;oBACT,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;oBACpE,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;iBACrE;aACF,CAAC,CAAA;SACH;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,CAAA;SACxB;QAED,aAAa,GAAG,aAAa,CAAC,IAAI,EAAE,CAAA;QACpC,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QACzB,OAAO,aAAa,CAAA;IACtB,CAAC;CAAA;AAED,0DAA0D;AAC1D,SAAsB,oBAAoB;;QACxC,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,MAAM,EAAE,CAAC,SAAS,CAAC,CAAC,CAAA;QAC3D,MAAM,OAAO,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QAC3C,IAAI,CAAC,KAAK,CAAC,iBAAiB,OAAO,EAAE,CAAC,CAAA;QAEtC,IAAI,aAAa,KAAK,EAAE,EAAE;YACxB,OAAO,6BAAiB,CAAC,IAAI,CAAA;SAC9B;aAAM;YACL,OAAO,6BAAiB,CAAC,eAAe,CAAA;SACzC;IACH,CAAC;CAAA;AAVD,oDAUC;AAED,SAAgB,gBAAgB,CAAC,iBAAoC;IACnE,OAAO,iBAAiB,KAAK,6BAAiB,CAAC,IAAI;QACjD,CAAC,CAAC,yBAAa,CAAC,IAAI;QACpB,CAAC,CAAC,yBAAa,CAAC,IAAI,CAAA;AACxB,CAAC;AAJD,4CAIC;AAED,SAAsB,sBAAsB;;QAC1C,IAAI,EAAE,CAAC,UAAU,CAAC,+BAAmB,CAAC,EAAE;YACtC,OAAO,+BAAmB,CAAA;SAC3B;QACD,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,KAAK,CAAC,CAAA;QAC7C,OAAO,aAAa,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAA;
IAC/E,CAAC;CAAA;AAND,wDAMC;AAED,SAAgB,aAAa,CAAI,IAAY,EAAE,KAAS;IACtD,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,KAAK,CAAC,YAAY,IAAI,0BAA0B,CAAC,CAAA;KACxD;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAND,sCAMC;AAED,SAAgB,eAAe,CAC7B,KAAe,EACf,iBAAqC,EACrC,oBAAoB,GAAG,KAAK;IAE5B,8BAA8B;IAC9B,MAAM,UAAU,GAAG,KAAK,CAAC,KAAK,EAAE,CAAA;IAEhC,qDAAqD;IACrD,6CAA6C;IAC7C,IAAI,iBAAiB,EAAE;QACrB,UAAU,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAA;KACnC;IAED,oEAAoE;IACpE,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,IAAI,CAAC,oBAAoB,EAAE;QACzD,UAAU,CAAC,IAAI,CAAC,cAAc,CAAC,CAAA;KAChC;IAED,uEAAuE;IACvE,UAAU,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;IAE5B,OAAO,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAA;AAC/E,CAAC;AAvBD,0CAuBC;AAED,SAAgB,eAAe;IAC7B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,EAAE;QACV,MAAM,IAAI,KAAK,CAAC,sDAAsD,CAAC,CAAA;KACxE;IACD,OAAO,KAAK,CAAA;AACd,CAAC;AAND,0CAMC"}

3
node_modules/@actions/cache/lib/internal/config.d.ts generated vendored Normal file
View File

@@ -0,0 +1,3 @@
export declare function isGhes(): boolean;
export declare function getCacheServiceVersion(): string;
export declare function getCacheServiceURL(): string;

37
node_modules/@actions/cache/lib/internal/config.js generated vendored Normal file
View File

@@ -0,0 +1,37 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getCacheServiceURL = exports.getCacheServiceVersion = exports.isGhes = void 0;
function isGhes() {
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
const hostname = ghUrl.hostname.trimEnd().toUpperCase();
const isGitHubHost = hostname === 'GITHUB.COM';
const isGheHost = hostname.endsWith('.GHE.COM');
const isLocalHost = hostname.endsWith('.LOCALHOST');
return !isGitHubHost && !isGheHost && !isLocalHost;
}
exports.isGhes = isGhes;
function getCacheServiceVersion() {
// Cache service v2 is not supported on GHES. We will default to
    // cache service v1 even if the feature flag was enabled by the user.
if (isGhes())
return 'v1';
return process.env['ACTIONS_CACHE_SERVICE_V2'] ? 'v2' : 'v1';
}
exports.getCacheServiceVersion = getCacheServiceVersion;
function getCacheServiceURL() {
const version = getCacheServiceVersion();
// Based on the version of the cache service, we will determine which
// URL to use.
switch (version) {
case 'v1':
return (process.env['ACTIONS_CACHE_URL'] ||
process.env['ACTIONS_RESULTS_URL'] ||
'');
case 'v2':
return process.env['ACTIONS_RESULTS_URL'] || '';
default:
throw new Error(`Unsupported cache service version: ${version}`);
}
}
exports.getCacheServiceURL = getCacheServiceURL;
//# sourceMappingURL=config.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"config.js","sourceRoot":"","sources":["../../src/internal/config.ts"],"names":[],"mappings":";;;AAAA,SAAgB,MAAM;IACpB,MAAM,KAAK,GAAG,IAAI,GAAG,CACnB,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,oBAAoB,CACzD,CAAA;IAED,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,WAAW,EAAE,CAAA;IACvD,MAAM,YAAY,GAAG,QAAQ,KAAK,YAAY,CAAA;IAC9C,MAAM,SAAS,GAAG,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAA;IAC/C,MAAM,WAAW,GAAG,QAAQ,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAA;IAEnD,OAAO,CAAC,YAAY,IAAI,CAAC,SAAS,IAAI,CAAC,WAAW,CAAA;AACpD,CAAC;AAXD,wBAWC;AAED,SAAgB,sBAAsB;IACpC,gEAAgE;IAChE,iEAAiE;IACjE,IAAI,MAAM,EAAE;QAAE,OAAO,IAAI,CAAA;IAEzB,OAAO,OAAO,CAAC,GAAG,CAAC,0BAA0B,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAA;AAC9D,CAAC;AAND,wDAMC;AAED,SAAgB,kBAAkB;IAChC,MAAM,OAAO,GAAG,sBAAsB,EAAE,CAAA;IAExC,qEAAqE;IACrE,cAAc;IACd,QAAQ,OAAO,EAAE;QACf,KAAK,IAAI;YACP,OAAO,CACL,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC;gBAChC,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC;gBAClC,EAAE,CACH,CAAA;QACH,KAAK,IAAI;YACP,OAAO,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC,IAAI,EAAE,CAAA;QACjD;YACE,MAAM,IAAI,KAAK,CAAC,sCAAsC,OAAO,EAAE,CAAC,CAAA;KACnE;AACH,CAAC;AAjBD,gDAiBC"}

View File

@@ -18,3 +18,4 @@ export declare const GnuTarPathOnWindows: string;
export declare const SystemTarPathOnWindows: string;
export declare const TarFilename = "cache.tar";
export declare const ManifestFilename = "manifest.txt";
export declare const CacheFileSizeLimit: number;

View File

@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ManifestFilename = exports.TarFilename = exports.SystemTarPathOnWindows = exports.GnuTarPathOnWindows = exports.SocketTimeout = exports.DefaultRetryDelay = exports.DefaultRetryAttempts = exports.ArchiveToolType = exports.CompressionMethod = exports.CacheFilename = void 0;
exports.CacheFileSizeLimit = exports.ManifestFilename = exports.TarFilename = exports.SystemTarPathOnWindows = exports.GnuTarPathOnWindows = exports.SocketTimeout = exports.DefaultRetryDelay = exports.DefaultRetryAttempts = exports.ArchiveToolType = exports.CompressionMethod = exports.CacheFilename = void 0;
var CacheFilename;
(function (CacheFilename) {
CacheFilename["Gzip"] = "cache.tgz";
@@ -33,4 +33,5 @@ exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\ta
exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
exports.TarFilename = 'cache.tar';
exports.ManifestFilename = 'manifest.txt';
exports.CacheFileSizeLimit = 10 * Math.pow(1024, 3); // 10GiB per repository
//# sourceMappingURL=constants.js.map

View File

@@ -1 +1 @@
{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/internal/constants.ts"],"names":[],"mappings":";;;AAAA,IAAY,aAGX;AAHD,WAAY,aAAa;IACvB,mCAAkB,CAAA;IAClB,oCAAmB,CAAA;AACrB,CAAC,EAHW,aAAa,6BAAb,aAAa,QAGxB;AAED,IAAY,iBAMX;AAND,WAAY,iBAAiB;IAC3B,kCAAa,CAAA;IACb,+CAA+C;IAC/C,6EAA6E;IAC7E,0DAAqC,CAAA;IACrC,kCAAa,CAAA;AACf,CAAC,EANW,iBAAiB,iCAAjB,iBAAiB,QAM5B;AAED,IAAY,eAGX;AAHD,WAAY,eAAe;IACzB,8BAAW,CAAA;IACX,8BAAW,CAAA;AACb,CAAC,EAHW,eAAe,+BAAf,eAAe,QAG1B;AAED,wCAAwC;AAC3B,QAAA,oBAAoB,GAAG,CAAC,CAAA;AAErC,4DAA4D;AAC/C,QAAA,iBAAiB,GAAG,IAAI,CAAA;AAErC,6EAA6E;AAC7E,+EAA+E;AAC/E,cAAc;AACD,QAAA,aAAa,GAAG,IAAI,CAAA;AAEjC,uDAAuD;AAC1C,QAAA,mBAAmB,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,0BAA0B,CAAA;AAE3F,uDAAuD;AAC1C,QAAA,sBAAsB,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,8BAA8B,CAAA;AAEpF,QAAA,WAAW,GAAG,WAAW,CAAA;AAEzB,QAAA,gBAAgB,GAAG,cAAc,CAAA"}
{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/internal/constants.ts"],"names":[],"mappings":";;;AAAA,IAAY,aAGX;AAHD,WAAY,aAAa;IACvB,mCAAkB,CAAA;IAClB,oCAAmB,CAAA;AACrB,CAAC,EAHW,aAAa,6BAAb,aAAa,QAGxB;AAED,IAAY,iBAMX;AAND,WAAY,iBAAiB;IAC3B,kCAAa,CAAA;IACb,+CAA+C;IAC/C,6EAA6E;IAC7E,0DAAqC,CAAA;IACrC,kCAAa,CAAA;AACf,CAAC,EANW,iBAAiB,iCAAjB,iBAAiB,QAM5B;AAED,IAAY,eAGX;AAHD,WAAY,eAAe;IACzB,8BAAW,CAAA;IACX,8BAAW,CAAA;AACb,CAAC,EAHW,eAAe,+BAAf,eAAe,QAG1B;AAED,wCAAwC;AAC3B,QAAA,oBAAoB,GAAG,CAAC,CAAA;AAErC,4DAA4D;AAC/C,QAAA,iBAAiB,GAAG,IAAI,CAAA;AAErC,6EAA6E;AAC7E,+EAA+E;AAC/E,cAAc;AACD,QAAA,aAAa,GAAG,IAAI,CAAA;AAEjC,uDAAuD;AAC1C,QAAA,mBAAmB,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,0BAA0B,CAAA;AAE3F,uDAAuD;AAC1C,QAAA,sBAAsB,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,8BAA8B,CAAA;AAEpF,QAAA,WAAW,GAAG,WAAW,CAAA;AAEzB,QAAA,gBAAgB,GAAG,cAAc,CAAA;AAEjC,QAAA,kBAAkB,GAAG,EAAE,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA,CAAC,uBAAuB"}

View File

@@ -0,0 +1,6 @@
import { CacheServiceClientJSON } from '../../generated/results/api/v1/cache.twirp';
export declare function internalCacheTwirpClient(options?: {
maxAttempts?: number;
retryIntervalMs?: number;
retryMultiplier?: number;
}): CacheServiceClientJSON;

View File

@@ -0,0 +1,160 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.internalCacheTwirpClient = void 0;
const core_1 = require("@actions/core");
const user_agent_1 = require("./user-agent");
const errors_1 = require("./errors");
const config_1 = require("../config");
const cacheUtils_1 = require("../cacheUtils");
const auth_1 = require("@actions/http-client/lib/auth");
const http_client_1 = require("@actions/http-client");
const cache_twirp_1 = require("../../generated/results/api/v1/cache.twirp");
/**
* This class is a wrapper around the CacheServiceClientJSON class generated by Twirp.
*
* It adds retry logic to the request method, which is not present in the generated client.
*
* This class is used to interact with cache service v2.
*/
class CacheServiceClient {
constructor(userAgent, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) {
this.maxAttempts = 5;
this.baseRetryIntervalMilliseconds = 3000;
this.retryMultiplier = 1.5;
const token = (0, cacheUtils_1.getRuntimeToken)();
this.baseUrl = (0, config_1.getCacheServiceURL)();
if (maxAttempts) {
this.maxAttempts = maxAttempts;
}
if (baseRetryIntervalMilliseconds) {
this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds;
}
if (retryMultiplier) {
this.retryMultiplier = retryMultiplier;
}
this.httpClient = new http_client_1.HttpClient(userAgent, [
new auth_1.BearerCredentialHandler(token)
]);
}
    // This function satisfies the Rpc interface. It is compatible with the
    // JSON generated client.
request(service, method, contentType, data) {
return __awaiter(this, void 0, void 0, function* () {
const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href;
(0, core_1.debug)(`[Request] ${method} ${url}`);
const headers = {
'Content-Type': contentType
};
try {
const { body } = yield this.retryableRequest(() => __awaiter(this, void 0, void 0, function* () { return this.httpClient.post(url, JSON.stringify(data), headers); }));
return body;
}
catch (error) {
throw new Error(`Failed to ${method}: ${error.message}`);
}
});
}
retryableRequest(operation) {
return __awaiter(this, void 0, void 0, function* () {
let attempt = 0;
let errorMessage = '';
let rawBody = '';
while (attempt < this.maxAttempts) {
let isRetryable = false;
try {
const response = yield operation();
const statusCode = response.message.statusCode;
rawBody = yield response.readBody();
(0, core_1.debug)(`[Response] - ${response.message.statusCode}`);
(0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`);
const body = JSON.parse(rawBody);
(0, core_1.debug)(`Body: ${JSON.stringify(body, null, 2)}`);
if (this.isSuccessStatusCode(statusCode)) {
return { response, body };
}
isRetryable = this.isRetryableHttpStatusCode(statusCode);
errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`;
if (body.msg) {
if (errors_1.UsageError.isUsageErrorMessage(body.msg)) {
throw new errors_1.UsageError();
}
errorMessage = `${errorMessage}: ${body.msg}`;
}
}
catch (error) {
if (error instanceof SyntaxError) {
(0, core_1.debug)(`Raw Body: ${rawBody}`);
}
if (error instanceof errors_1.UsageError) {
throw error;
}
if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code);
}
isRetryable = true;
errorMessage = error.message;
}
if (!isRetryable) {
throw new Error(`Received non-retryable error: ${errorMessage}`);
}
if (attempt + 1 === this.maxAttempts) {
throw new Error(`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`);
}
const retryTimeMilliseconds = this.getExponentialRetryTimeMilliseconds(attempt);
(0, core_1.info)(`Attempt ${attempt + 1} of ${this.maxAttempts} failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`);
yield this.sleep(retryTimeMilliseconds);
attempt++;
}
throw new Error(`Request failed`);
});
}
isSuccessStatusCode(statusCode) {
if (!statusCode)
return false;
return statusCode >= 200 && statusCode < 300;
}
isRetryableHttpStatusCode(statusCode) {
if (!statusCode)
return false;
const retryableStatusCodes = [
http_client_1.HttpCodes.BadGateway,
http_client_1.HttpCodes.GatewayTimeout,
http_client_1.HttpCodes.InternalServerError,
http_client_1.HttpCodes.ServiceUnavailable,
http_client_1.HttpCodes.TooManyRequests
];
return retryableStatusCodes.includes(statusCode);
}
sleep(milliseconds) {
return __awaiter(this, void 0, void 0, function* () {
return new Promise(resolve => setTimeout(resolve, milliseconds));
});
}
getExponentialRetryTimeMilliseconds(attempt) {
if (attempt < 0) {
throw new Error('attempt should be a positive integer');
}
if (attempt === 0) {
return this.baseRetryIntervalMilliseconds;
}
const minTime = this.baseRetryIntervalMilliseconds * Math.pow(this.retryMultiplier, attempt);
const maxTime = minTime * this.retryMultiplier;
// returns a random number between minTime and maxTime (exclusive)
return Math.trunc(Math.random() * (maxTime - minTime) + minTime);
}
}
function internalCacheTwirpClient(options) {
const client = new CacheServiceClient((0, user_agent_1.getUserAgentString)(), options === null || options === void 0 ? void 0 : options.maxAttempts, options === null || options === void 0 ? void 0 : options.retryIntervalMs, options === null || options === void 0 ? void 0 : options.retryMultiplier);
return new cache_twirp_1.CacheServiceClientJSON(client);
}
exports.internalCacheTwirpClient = internalCacheTwirpClient;
//# sourceMappingURL=cacheTwirpClient.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"cacheTwirpClient.js","sourceRoot":"","sources":["../../../src/internal/shared/cacheTwirpClient.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,wCAAyC;AACzC,6CAA+C;AAC/C,qCAAiD;AACjD,sCAA4C;AAC5C,8CAA6C;AAC7C,wDAAqE;AACrE,sDAA8E;AAC9E,4EAAiF;AAYjF;;;;;;GAMG;AACH,MAAM,kBAAkB;IAOtB,YACE,SAAiB,EACjB,WAAoB,EACpB,6BAAsC,EACtC,eAAwB;QARlB,gBAAW,GAAG,CAAC,CAAA;QACf,kCAA6B,GAAG,IAAI,CAAA;QACpC,oBAAe,GAAG,GAAG,CAAA;QAQ3B,MAAM,KAAK,GAAG,IAAA,4BAAe,GAAE,CAAA;QAC/B,IAAI,CAAC,OAAO,GAAG,IAAA,2BAAkB,GAAE,CAAA;QACnC,IAAI,WAAW,EAAE;YACf,IAAI,CAAC,WAAW,GAAG,WAAW,CAAA;SAC/B;QACD,IAAI,6BAA6B,EAAE;YACjC,IAAI,CAAC,6BAA6B,GAAG,6BAA6B,CAAA;SACnE;QACD,IAAI,eAAe,EAAE;YACnB,IAAI,CAAC,eAAe,GAAG,eAAe,CAAA;SACvC;QAED,IAAI,CAAC,UAAU,GAAG,IAAI,wBAAU,CAAC,SAAS,EAAE;YAC1C,IAAI,8BAAuB,CAAC,KAAK,CAAC;SACnC,CAAC,CAAA;IACJ,CAAC;IAED,4EAA4E;IAC5E,yBAAyB;IACnB,OAAO,CACX,OAAe,EACf,MAAc,EACd,WAAwD,EACxD,IAAyB;;YAEzB,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,UAAU,OAAO,IAAI,MAAM,EAAE,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC,IAAI,CAAA;YACrE,IAAA,YAAK,EAAC,aAAa,MAAM,IAAI,GAAG,EAAE,CAAC,CAAA;YACnC,MAAM,OAAO,GAAG;gBACd,cAAc,EAAE,WAAW;aAC5B,CAAA;YACD,IAAI;gBACF,MAAM,EAAC,IAAI,EAAC,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,GAAS,EAAE,gDACpD,OAAA,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,OAAO,CAAC,CAAA,GAAA,CACzD,CAAA;gBAED,OAAO,IAAI,CAAA;aACZ;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,aAAa,MAAM,KAAK,KAAK,CAAC,OAAO,EAAE,CAAC,CAAA;aACzD;QACH,CAAC;KAAA;IAEK,gBAAgB,CACpB,SAA4C;;YAE5C,IAAI,OAAO,GAAG,CAAC,CAAA;YACf,IAAI,YAAY,GAAG,EAAE,CAAA;YACrB,IAAI,OAAO,GAAG,EAAE,CAAA;YAChB,OAAO,OAAO,GAAG,IAAI,CAAC,WAAW,EAAE;gBACjC,IAAI,WAAW,GAAG,KAAK,CAAA;gBAEvB,IAAI;oBACF,MAAM,QAAQ,GAAG,MAAM,SAAS,EAAE,CAAA;oBAClC,MAAM,UAAU,GAAG,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAA;oBAC9C,OAAO,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBACnC,IAAA,YAAK,EAAC,gBAAgB,QAAQ,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;oBACpD,IAAA,YAAK,EAAC,YAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAA;oBACtE,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;oBAChC,IAAA,YAAK,EAAC,SAAS,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAA;oBAC/C,IAAI,IAAI,CAAC,mBAAmB,CAAC,UAAU,CAAC,EAAE;wBACxC,OAAO,EAAC,QAAQ,EAAE,IAAI,EAAC,CAAA;qBACxB;oBACD,WAAW,GAAG,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,CAAA;oBACxD,YAAY,GAAG,oBAAoB,UAAU,KAAK,QAAQ,CAAC,OAAO,CAAC,aAAa,EAAE,CAAA;oBAClF,IAAI,IAAI,CAAC,GAAG,EAAE;wBACZ,IAAI,mBAAU,CAAC,mBAAmB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;4BAC5C,MAAM,IAAI,mBAAU,EAAE,CAAA;yBACvB;wBAED,YAAY,GAAG,GAAG,YAAY,KAAK,IAAI,CAAC,GAAG,EAAE,CAAA;qBAC9C;iBACF;gBAAC,OAAO,KAAK,EAAE;oBACd,IAAI,KAAK,YAAY,WAAW,EAAE;wBAChC,IAAA,YAAK,EAAC,aAAa,OAAO,EAAE,CAAC,CAAA;qBAC9B;oBAED,IAAI,KAAK,YAAY,mBAAU,EAAE;wBAC/B,MAAM,KAAK,CAAA;qBACZ;oBAED,IAAI,qBAAY,CAAC,kBAAkB,CAAC,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,IAAI,CAAC,EAAE;wBAChD,MAAM,IAAI,qBAAY,CAAC,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,IAAI,CAAC,CAAA;qBACpC;oBAED,WAAW,GAAG,IAAI,CAAA;oBAClB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAA;iBAC7B;gBAED,IAAI,CAAC,WAAW,EAAE;oBAChB,MAAM,IAAI,KAAK,CAAC,iCAAiC,YAAY,EAAE,CAAC,CAAA;iBACjE;gBAED,IAAI,OAAO,GAAG,CAAC,KAAK,IAAI,CAAC,WAAW,EAAE;oBACpC,MAAM,IAAI,KAAK,CACb,gCAAgC,IAAI,CAAC,WAAW,cAAc,YAAY,EAAE,CAC7E,CAAA;iBACF;gBAED,MAAM,qBAAqB,GACzB,IAAI,CAAC,mCAAmC,CAAC,OAAO,CAAC,CAAA;gBACnD,IAAA,WAAI,EACF,WAAW,OAAO,GAAG,CAAC,OACpB,IAAI,CAAC,WACP,uBAAuB,YAAY,yBAAyB,qBAAqB,QAAQ,CAC1F,CAAA;gBACD,MAAM,IAAI,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAA;gBACvC,OAAO,EAAE,CAAA;aACV;YAED,MAAM,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAA;QACnC,CAAC;KAAA;IAED,mBAAmB,CAAC,UAAmB;QACrC,IAAI,CAAC,UAAU;YAAE,OAAO,KAAK,CAAA;QAC7B,OAAO,UAAU,IAAI,GAAG,IAAI,UAAU,GAAG,
GAAG,CAAA;IAC9C,CAAC;IAED,yBAAyB,CAAC,UAAmB;QAC3C,IAAI,CAAC,UAAU;YAAE,OAAO,KAAK,CAAA;QAE7B,MAAM,oBAAoB,GAAG;YAC3B,uBAAS,CAAC,UAAU;YACpB,uBAAS,CAAC,cAAc;YACxB,uBAAS,CAAC,mBAAmB;YAC7B,uBAAS,CAAC,kBAAkB;YAC5B,uBAAS,CAAC,eAAe;SAC1B,CAAA;QAED,OAAO,oBAAoB,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAA;IAClD,CAAC;IAEK,KAAK,CAAC,YAAoB;;YAC9B,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAA;QAClE,CAAC;KAAA;IAED,mCAAmC,CAAC,OAAe;QACjD,IAAI,OAAO,GAAG,CAAC,EAAE;YACf,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAA;SACxD;QAED,IAAI,OAAO,KAAK,CAAC,EAAE;YACjB,OAAO,IAAI,CAAC,6BAA6B,CAAA;SAC1C;QAED,MAAM,OAAO,GACX,IAAI,CAAC,6BAA6B,GAAG,SAAA,IAAI,CAAC,eAAe,EAAI,OAAO,CAAA,CAAA;QACtE,MAAM,OAAO,GAAG,OAAO,GAAG,IAAI,CAAC,eAAe,CAAA;QAE9C,kEAAkE;QAClE,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,OAAO,GAAG,OAAO,CAAC,GAAG,OAAO,CAAC,CAAA;IAClE,CAAC;CACF;AAED,SAAgB,wBAAwB,CAAC,OAIxC;IACC,MAAM,MAAM,GAAG,IAAI,kBAAkB,CACnC,IAAA,+BAAkB,GAAE,EACpB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,EACpB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,eAAe,EACxB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,eAAe,CACzB,CAAA;IACD,OAAO,IAAI,oCAAsB,CAAC,MAAM,CAAC,CAAA;AAC3C,CAAC;AAZD,4DAYC"}
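
The retry delay in CacheServiceClient grows geometrically with jitter: for attempt n > 0 the wait is drawn uniformly from [base · m^n, base · m^(n+1)), so with the defaults (base 3000 ms, multiplier 1.5) attempt 1 lands in [4500 ms, 6750 ms) and attempt 2 in [6750 ms, 10125 ms). A standalone sketch of that schedule (the function name is illustrative; it mirrors getExponentialRetryTimeMilliseconds rather than importing the internal class):

// Sketch of the exponential-backoff-with-jitter schedule used by the retry loop above.
function retryDelayMs(attempt: number, baseMs = 3000, multiplier = 1.5): number {
  if (attempt < 0) throw new Error('attempt must be >= 0');
  if (attempt === 0) return baseMs; // the first retry waits the base interval
  const minTime = baseMs * Math.pow(multiplier, attempt);
  const maxTime = minTime * multiplier;
  // Uniform jitter between minTime (inclusive) and maxTime (exclusive), truncated to whole ms.
  return Math.trunc(Math.random() * (maxTime - minTime) + minTime);
}

// Attempt 1 falls in [4500, 6750) ms, attempt 2 in [6750, 10125) ms, and so on.
for (let attempt = 0; attempt < 4; attempt++) {
  console.log(`attempt ${attempt}: ~${retryDelayMs(attempt)} ms`);
}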

View File

@@ -0,0 +1,22 @@
export declare class FilesNotFoundError extends Error {
files: string[];
constructor(files?: string[]);
}
export declare class InvalidResponseError extends Error {
constructor(message: string);
}
export declare class CacheNotFoundError extends Error {
constructor(message?: string);
}
export declare class GHESNotSupportedError extends Error {
constructor(message?: string);
}
export declare class NetworkError extends Error {
code: string;
constructor(code: string);
static isNetworkErrorCode: (code?: string) => boolean;
}
export declare class UsageError extends Error {
constructor();
static isUsageErrorMessage: (msg?: string) => boolean;
}

View File

@@ -0,0 +1,70 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.UsageError = exports.NetworkError = exports.GHESNotSupportedError = exports.CacheNotFoundError = exports.InvalidResponseError = exports.FilesNotFoundError = void 0;
class FilesNotFoundError extends Error {
constructor(files = []) {
let message = 'No files were found to upload';
if (files.length > 0) {
message += `: ${files.join(', ')}`;
}
super(message);
this.files = files;
this.name = 'FilesNotFoundError';
}
}
exports.FilesNotFoundError = FilesNotFoundError;
class InvalidResponseError extends Error {
constructor(message) {
super(message);
this.name = 'InvalidResponseError';
}
}
exports.InvalidResponseError = InvalidResponseError;
class CacheNotFoundError extends Error {
constructor(message = 'Cache not found') {
super(message);
this.name = 'CacheNotFoundError';
}
}
exports.CacheNotFoundError = CacheNotFoundError;
class GHESNotSupportedError extends Error {
constructor(message = '@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.') {
super(message);
this.name = 'GHESNotSupportedError';
}
}
exports.GHESNotSupportedError = GHESNotSupportedError;
class NetworkError extends Error {
constructor(code) {
const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`;
super(message);
this.code = code;
this.name = 'NetworkError';
}
}
exports.NetworkError = NetworkError;
NetworkError.isNetworkErrorCode = (code) => {
if (!code)
return false;
return [
'ECONNRESET',
'ENOTFOUND',
'ETIMEDOUT',
'ECONNREFUSED',
'EHOSTUNREACH'
].includes(code);
};
class UsageError extends Error {
constructor() {
const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`;
super(message);
this.name = 'UsageError';
}
}
exports.UsageError = UsageError;
UsageError.isUsageErrorMessage = (msg) => {
if (!msg)
return false;
return msg.includes('insufficient usage');
};
//# sourceMappingURL=errors.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"errors.js","sourceRoot":"","sources":["../../../src/internal/shared/errors.ts"],"names":[],"mappings":";;;AAAA,MAAa,kBAAmB,SAAQ,KAAK;IAG3C,YAAY,QAAkB,EAAE;QAC9B,IAAI,OAAO,GAAG,+BAA+B,CAAA;QAC7C,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;YACpB,OAAO,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAA;SACnC;QAED,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;QAClB,IAAI,CAAC,IAAI,GAAG,oBAAoB,CAAA;IAClC,CAAC;CACF;AAbD,gDAaC;AAED,MAAa,oBAAqB,SAAQ,KAAK;IAC7C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,sBAAsB,CAAA;IACpC,CAAC;CACF;AALD,oDAKC;AAED,MAAa,kBAAmB,SAAQ,KAAK;IAC3C,YAAY,OAAO,GAAG,iBAAiB;QACrC,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,oBAAoB,CAAA;IAClC,CAAC;CACF;AALD,gDAKC;AAED,MAAa,qBAAsB,SAAQ,KAAK;IAC9C,YACE,OAAO,GAAG,mHAAmH;QAE7H,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,uBAAuB,CAAA;IACrC,CAAC;CACF;AAPD,sDAOC;AAED,MAAa,YAAa,SAAQ,KAAK;IAGrC,YAAY,IAAY;QACtB,MAAM,OAAO,GAAG,2BAA2B,IAAI,kRAAkR,CAAA;QACjU,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,IAAI,GAAG,cAAc,CAAA;IAC5B,CAAC;;AARH,oCAoBC;AAVQ,+BAAkB,GAAG,CAAC,IAAa,EAAW,EAAE;IACrD,IAAI,CAAC,IAAI;QAAE,OAAO,KAAK,CAAA;IACvB,OAAO;QACL,YAAY;QACZ,WAAW;QACX,WAAW;QACX,cAAc;QACd,cAAc;KACf,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAA;AAClB,CAAC,CAAA;AAGH,MAAa,UAAW,SAAQ,KAAK;IACnC;QACE,MAAM,OAAO,GAAG,iSAAiS,CAAA;QACjT,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,YAAY,CAAA;IAC1B,CAAC;;AALH,gCAWC;AAJQ,8BAAmB,GAAG,CAAC,GAAY,EAAW,EAAE;IACrD,IAAI,CAAC,GAAG;QAAE,OAAO,KAAK,CAAA;IACtB,OAAO,GAAG,CAAC,QAAQ,CAAC,oBAAoB,CAAC,CAAA;AAC3C,CAAC,CAAA"}

View File

@@ -0,0 +1,4 @@
/**
* Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package
*/
export declare function getUserAgentString(): string;

View File

@@ -0,0 +1,13 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getUserAgentString = void 0;
// eslint-disable-next-line @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports
const packageJson = require('../../../package.json');
/**
* Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package
*/
function getUserAgentString() {
return `@actions/cache-${packageJson.version}`;
}
exports.getUserAgentString = getUserAgentString;
//# sourceMappingURL=user-agent.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"user-agent.js","sourceRoot":"","sources":["../../../src/internal/shared/user-agent.ts"],"names":[],"mappings":";;;AAAA,qGAAqG;AACrG,MAAM,WAAW,GAAG,OAAO,CAAC,uBAAuB,CAAC,CAAA;AAEpD;;GAEG;AACH,SAAgB,kBAAkB;IAChC,OAAO,kBAAkB,WAAW,CAAC,OAAO,EAAE,CAAA;AAChD,CAAC;AAFD,gDAEC"}

View File

@@ -0,0 +1,60 @@
import { BlobUploadCommonResponse } from '@azure/storage-blob';
import { TransferProgressEvent } from '@azure/ms-rest-js';
import { UploadOptions } from '../options';
/**
* Class for tracking the upload state and displaying stats.
*/
export declare class UploadProgress {
contentLength: number;
sentBytes: number;
startTime: number;
displayedComplete: boolean;
timeoutHandle?: ReturnType<typeof setTimeout>;
constructor(contentLength: number);
/**
* Sets the number of bytes sent
*
* @param sentBytes the number of bytes sent
*/
setSentBytes(sentBytes: number): void;
/**
* Returns the total number of bytes transferred.
*/
getTransferredBytes(): number;
/**
* Returns true if the upload is complete.
*/
isDone(): boolean;
/**
* Prints the current upload stats. Once the upload completes, this will print one
* last line and then stop.
*/
display(): void;
/**
* Returns a function used to handle TransferProgressEvents.
*/
onProgress(): (progress: TransferProgressEvent) => void;
/**
* Starts the timer that displays the stats.
*
* @param delayInMs the delay between each write
*/
startDisplayTimer(delayInMs?: number): void;
/**
* Stops the timer that displays the stats. As this typically indicates the upload
* is complete, this will display one last line, unless the last line has already
* been written.
*/
stopDisplayTimer(): void;
}
/**
* Uploads a cache archive directly to Azure Blob Storage using the Azure SDK.
* This function will display progress information to the console. Concurrency of the
* upload is determined by the calling functions.
*
* @param signedUploadURL
* @param archivePath
* @param options
* @returns
*/
export declare function uploadCacheArchiveSDK(signedUploadURL: string, archivePath: string, options?: UploadOptions): Promise<BlobUploadCommonResponse>;

167
node_modules/@actions/cache/lib/internal/uploadUtils.js generated vendored Normal file
View File

@@ -0,0 +1,167 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.uploadCacheArchiveSDK = exports.UploadProgress = void 0;
const core = __importStar(require("@actions/core"));
const storage_blob_1 = require("@azure/storage-blob");
const errors_1 = require("./shared/errors");
/**
* Class for tracking the upload state and displaying stats.
*/
class UploadProgress {
constructor(contentLength) {
this.contentLength = contentLength;
this.sentBytes = 0;
this.displayedComplete = false;
this.startTime = Date.now();
}
/**
* Sets the number of bytes sent
*
* @param sentBytes the number of bytes sent
*/
setSentBytes(sentBytes) {
this.sentBytes = sentBytes;
}
/**
* Returns the total number of bytes transferred.
*/
getTransferredBytes() {
return this.sentBytes;
}
/**
* Returns true if the upload is complete.
*/
isDone() {
return this.getTransferredBytes() === this.contentLength;
}
/**
* Prints the current upload stats. Once the upload completes, this will print one
* last line and then stop.
*/
display() {
if (this.displayedComplete) {
return;
}
const transferredBytes = this.sentBytes;
const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);
const elapsedTime = Date.now() - this.startTime;
const uploadSpeed = (transferredBytes /
(1024 * 1024) /
(elapsedTime / 1000)).toFixed(1);
core.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`);
if (this.isDone()) {
this.displayedComplete = true;
}
}
/**
* Returns a function used to handle TransferProgressEvents.
*/
onProgress() {
return (progress) => {
this.setSentBytes(progress.loadedBytes);
};
}
/**
* Starts the timer that displays the stats.
*
* @param delayInMs the delay between each write
*/
startDisplayTimer(delayInMs = 1000) {
const displayCallback = () => {
this.display();
if (!this.isDone()) {
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
};
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
/**
* Stops the timer that displays the stats. As this typically indicates the upload
* is complete, this will display one last line, unless the last line has already
* been written.
*/
stopDisplayTimer() {
if (this.timeoutHandle) {
clearTimeout(this.timeoutHandle);
this.timeoutHandle = undefined;
}
this.display();
}
}
exports.UploadProgress = UploadProgress;
/**
* Uploads a cache archive directly to Azure Blob Storage using the Azure SDK.
* This function will display progress information to the console. Concurrency of the
* upload is determined by the calling functions.
*
* @param signedUploadURL
* @param archivePath
* @param options
* @returns
*/
function uploadCacheArchiveSDK(signedUploadURL, archivePath, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const blobClient = new storage_blob_1.BlobClient(signedUploadURL);
const blockBlobClient = blobClient.getBlockBlobClient();
const uploadProgress = new UploadProgress((_a = options === null || options === void 0 ? void 0 : options.archiveSizeBytes) !== null && _a !== void 0 ? _a : 0);
// Specify data transfer options
const uploadOptions = {
blockSize: options === null || options === void 0 ? void 0 : options.uploadChunkSize,
concurrency: options === null || options === void 0 ? void 0 : options.uploadConcurrency,
maxSingleShotSize: 128 * 1024 * 1024,
onProgress: uploadProgress.onProgress()
};
try {
uploadProgress.startDisplayTimer();
core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`);
const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions);
// TODO: better management of non-retryable errors
if (response._response.status >= 400) {
throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`);
}
return response;
}
catch (error) {
core.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`);
throw error;
}
finally {
uploadProgress.stopDisplayTimer();
}
});
}
exports.uploadCacheArchiveSDK = uploadCacheArchiveSDK;
//# sourceMappingURL=uploadUtils.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"uploadUtils.js","sourceRoot":"","sources":["../../src/internal/uploadUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,sDAK4B;AAE5B,4CAAoD;AAGpD;;GAEG;AACH,MAAa,cAAc;IAOzB,YAAY,aAAqB;QAC/B,IAAI,CAAC,aAAa,GAAG,aAAa,CAAA;QAClC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAA;QAClB,IAAI,CAAC,iBAAiB,GAAG,KAAK,CAAA;QAC9B,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;IAC7B,CAAC;IAED;;;;OAIG;IACH,YAAY,CAAC,SAAiB;QAC5B,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;IAC5B,CAAC;IAED;;OAEG;IACH,mBAAmB;QACjB,OAAO,IAAI,CAAC,SAAS,CAAA;IACvB,CAAC;IAED;;OAEG;IACH,MAAM;QACJ,OAAO,IAAI,CAAC,mBAAmB,EAAE,KAAK,IAAI,CAAC,aAAa,CAAA;IAC1D,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,IAAI,IAAI,CAAC,iBAAiB,EAAE;YAC1B,OAAM;SACP;QAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,SAAS,CAAA;QACvC,MAAM,UAAU,GAAG,CAAC,GAAG,GAAG,CAAC,gBAAgB,GAAG,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,OAAO,CACxE,CAAC,CACF,CAAA;QACD,MAAM,WAAW,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,SAAS,CAAA;QAC/C,MAAM,WAAW,GAAG,CAClB,gBAAgB;YAChB,CAAC,IAAI,GAAG,IAAI,CAAC;YACb,CAAC,WAAW,GAAG,IAAI,CAAC,CACrB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;QAEZ,IAAI,CAAC,IAAI,CACP,QAAQ,gBAAgB,OAAO,IAAI,CAAC,aAAa,KAAK,UAAU,OAAO,WAAW,UAAU,CAC7F,CAAA;QAED,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE;YACjB,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAA;SAC9B;IACH,CAAC;IAED;;OAEG;IACH,UAAU;QACR,OAAO,CAAC,QAA+B,EAAE,EAAE;YACzC,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAA;QACzC,CAAC,CAAA;IACH,CAAC;IAED;;;;OAIG;IACH,iBAAiB,CAAC,SAAS,GAAG,IAAI;QAChC,MAAM,eAAe,GAAG,GAAS,EAAE;YACjC,IAAI,CAAC,OAAO,EAAE,CAAA;YAEd,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE;gBAClB,IAAI,CAAC,aAAa,GAAG,UAAU,CAAC,eAAe,EAAE,SAAS,CAAC,CAAA;aAC5D;QACH,CAAC,CAAA;QAED,IAAI,CAAC,aAAa,GAAG,UAAU,CAAC,eAAe,EAAE,SAAS,CAAC,CAAA;IAC7D,CAAC;IAED;;;;OAIG;IACH,gBAAgB;QACd,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,YAAY,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;YAChC,IAAI,CAAC,aAAa,GAAG,SAAS,CAAA;SAC/B;QAED,IAAI,CAAC,OAAO,EAAE,CAAA;IAChB,CAAC;CACF;AAzGD,wCAyGC;AAED;;;;;;;;;GASG;AACH,SAAsB,qBAAqB,CACzC,eAAuB,EACvB,WAAmB,EACnB,OAAuB;;;QAEvB,MAAM,UAAU,GAAe,IAAI,yBAAU,CAAC,eAAe,CAAC,CAAA;QAC9D,MAAM,eAAe,GAAoB,UAAU,CAAC,kBAAkB,EAAE,CAAA;QACxE,MAAM,cAAc,GAAG,IAAI,cAAc,CAAC,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,mCAAI,CAAC,CAAC,CAAA;QAEzE,gCAAgC;QAChC,MAAM,aAAa,GAAmC;YACpD,SAAS,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,eAAe;YACnC,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,iBAAiB;YACvC,iBAAiB,EAAE,GAAG,GAAG,IAAI,GAAG,IAAI;YACpC,UAAU,EAAE,cAAc,CAAC,UAAU,EAAE;SACxC,CAAA;QAED,IAAI;YACF,cAAc,CAAC,iBAAiB,EAAE,CAAA;YAElC,IAAI,CAAC,KAAK,CACR,eAAe,UAAU,CAAC,IAAI,IAAI,UAAU,CAAC,WAAW,IAAI,UAAU,CAAC,aAAa,EAAE,CACvF,CAAA;YAED,MAAM,QAAQ,GAAG,MAAM,eAAe,CAAC,UAAU,CAC/C,WAAW,EACX,aAAa,CACd,CAAA;YAED,kDAAkD;YAClD,IAAI,QAAQ,CAAC,SAAS,CAAC,MAAM,IAAI,GAAG,EAAE;gBACpC,MAAM,IAAI,6BAAoB,CAC5B,yDAAyD,QAAQ,CAAC,SAAS,CAAC,MAAM,EAAE,CACrF,CAAA;aACF;YAED,OAAO,QAAQ,CAAA;SAChB;QAAC,OAAO,KAAK,EAAE;YACd,IAAI,CAAC,OAAO,CACV,kEAAkE,KAAK,CAAC,OAAO,EAAE,CAClF,CAAA;YACD,MAAM,KAAK,CAAA;SACZ;gBAAS;YACR,cAAc,CAAC,gBAAgB,EAAE,CAAA;SAClC;;CACF;AA7CD,sDA6CC"}

View File

@@ -2,6 +2,14 @@
* Options to control cache upload
*/
export interface UploadOptions {
/**
* Indicates whether to use the Azure Blob SDK to download caches
* that are stored on Azure Blob Storage to improve reliability and
* performance
*
* @default false
*/
useAzureSdk?: boolean;
/**
* Number of parallel cache upload
*
@@ -14,6 +22,10 @@ export interface UploadOptions {
* @default 32MB
*/
uploadChunkSize?: number;
/**
* Archive size in bytes
*/
archiveSizeBytes?: number;
}
/**
* Options to control cache download

View File

@@ -31,11 +31,16 @@ const core = __importStar(require("@actions/core"));
* @param copy the original upload options
*/
function getUploadOptions(copy) {
    // Defaults if not overridden
const result = {
useAzureSdk: false,
uploadConcurrency: 4,
uploadChunkSize: 32 * 1024 * 1024
};
if (copy) {
if (typeof copy.useAzureSdk === 'boolean') {
result.useAzureSdk = copy.useAzureSdk;
}
if (typeof copy.uploadConcurrency === 'number') {
result.uploadConcurrency = copy.uploadConcurrency;
}
@@ -43,6 +48,18 @@ function getUploadOptions(copy) {
result.uploadChunkSize = copy.uploadChunkSize;
}
}
/**
* Add env var overrides
*/
// Cap the uploadConcurrency at 32
result.uploadConcurrency = !isNaN(Number(process.env['CACHE_UPLOAD_CONCURRENCY']))
? Math.min(32, Number(process.env['CACHE_UPLOAD_CONCURRENCY']))
: result.uploadConcurrency;
// Cap the uploadChunkSize at 128MiB
result.uploadChunkSize = !isNaN(Number(process.env['CACHE_UPLOAD_CHUNK_SIZE']))
? Math.min(128 * 1024 * 1024, Number(process.env['CACHE_UPLOAD_CHUNK_SIZE']) * 1024 * 1024)
: result.uploadChunkSize;
core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
core.debug(`Upload concurrency: ${result.uploadConcurrency}`);
core.debug(`Upload chunk size: ${result.uploadChunkSize}`);
return result;

View File

@@ -1 +1 @@
{"version":3,"file":"options.js","sourceRoot":"","sources":["../src/options.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AAwErC;;;;GAIG;AACH,SAAgB,gBAAgB,CAAC,IAAoB;IACnD,MAAM,MAAM,GAAkB;QAC5B,iBAAiB,EAAE,CAAC;QACpB,eAAe,EAAE,EAAE,GAAG,IAAI,GAAG,IAAI;KAClC,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,iBAAiB,KAAK,QAAQ,EAAE;YAC9C,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAA;SAClD;QAED,IAAI,OAAO,IAAI,CAAC,eAAe,KAAK,QAAQ,EAAE;YAC5C,MAAM,CAAC,eAAe,GAAG,IAAI,CAAC,eAAe,CAAA;SAC9C;KACF;IAED,IAAI,CAAC,KAAK,CAAC,uBAAuB,MAAM,CAAC,iBAAiB,EAAE,CAAC,CAAA;IAC7D,IAAI,CAAC,KAAK,CAAC,sBAAsB,MAAM,CAAC,eAAe,EAAE,CAAC,CAAA;IAE1D,OAAO,MAAM,CAAA;AACf,CAAC;AApBD,4CAoBC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,IAAsB;IACvD,MAAM,MAAM,GAAoB;QAC9B,WAAW,EAAE,KAAK;QAClB,uBAAuB,EAAE,IAAI;QAC7B,mBAAmB,EAAE,CAAC;QACtB,WAAW,EAAE,KAAK;QAClB,kBAAkB,EAAE,MAAM;QAC1B,UAAU,EAAE,KAAK;KAClB,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,SAAS,EAAE;YACzC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,uBAAuB,KAAK,SAAS,EAAE;YACrD,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAA;SAC9D;QAED,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,QAAQ,EAAE;YAChD,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAA;SACtD;QAED,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,QAAQ,EAAE;YACxC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,kBAAkB,KAAK,QAAQ,EAAE;YAC/C,MAAM,CAAC,kBAAkB,GAAG,IAAI,CAAC,kBAAkB,CAAA;SACpD;QAED,IAAI,OAAO,IAAI,CAAC,UAAU,KAAK,SAAS,EAAE;YACxC,MAAM,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU,CAAA;SACpC;KACF;IACD,MAAM,0BAA0B,GAC9B,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,CAAA;IAE9C,IACE,0BAA0B;QAC1B,CAAC,KAAK,CAAC,MAAM,CAAC,0BAA0B,CAAC,CAAC;QAC1C,QAAQ,CAAC,MAAM,CAAC,0BAA0B,CAAC,CAAC,EAC5C;QACA,MAAM,CAAC,kBAAkB,GAAG,MAAM,CAAC,0BAA0B,CAAC,GAAG,EAAE,GAAG,IAAI,CAAA;KAC3E;IACD,IAAI,CAAC,KAAK,CAAC,kBAAkB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,mBAAmB,EAAE,CAAC,CAAA;IACjE,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IACzD,IAAI,CAAC,KAAK,CACR,gDAAgD,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,EAAE,CAC/F,CAAA;IACD,IAAI,CAAC,KAAK,CAAC,kCAAkC,MAAM,CAAC,kBAAkB,EAAE,CAAC,CAAA;IACzE,IAAI,CAAC,KAAK,CAAC,gBAAgB,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;IAE/C,OAAO,MAAM,CAAA;AACf,CAAC;AAvDD,gDAuDC"}
{"version":3,"file":"options.js","sourceRoot":"","sources":["../src/options.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AAoFrC;;;;GAIG;AACH,SAAgB,gBAAgB,CAAC,IAAoB;IACnD,4BAA4B;IAC5B,MAAM,MAAM,GAAkB;QAC5B,WAAW,EAAE,KAAK;QAClB,iBAAiB,EAAE,CAAC;QACpB,eAAe,EAAE,EAAE,GAAG,IAAI,GAAG,IAAI;KAClC,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,SAAS,EAAE;YACzC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,iBAAiB,KAAK,QAAQ,EAAE;YAC9C,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAA;SAClD;QAED,IAAI,OAAO,IAAI,CAAC,eAAe,KAAK,QAAQ,EAAE;YAC5C,MAAM,CAAC,eAAe,GAAG,IAAI,CAAC,eAAe,CAAA;SAC9C;KACF;IAED;;OAEG;IACH,kCAAkC;IAClC,MAAM,CAAC,iBAAiB,GAAG,CAAC,KAAK,CAC/B,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,0BAA0B,CAAC,CAAC,CAChD;QACC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,0BAA0B,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,MAAM,CAAC,iBAAiB,CAAA;IAC5B,oCAAoC;IACpC,MAAM,CAAC,eAAe,GAAG,CAAC,KAAK,CAC7B,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,yBAAyB,CAAC,CAAC,CAC/C;QACC,CAAC,CAAC,IAAI,CAAC,GAAG,CACN,GAAG,GAAG,IAAI,GAAG,IAAI,EACjB,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,yBAAyB,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAC7D;QACH,CAAC,CAAC,MAAM,CAAC,eAAe,CAAA;IAE1B,IAAI,CAAC,KAAK,CAAC,kBAAkB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,CAAC,uBAAuB,MAAM,CAAC,iBAAiB,EAAE,CAAC,CAAA;IAC7D,IAAI,CAAC,KAAK,CAAC,sBAAsB,MAAM,CAAC,eAAe,EAAE,CAAC,CAAA;IAE1D,OAAO,MAAM,CAAA;AACf,CAAC;AA9CD,4CA8CC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,IAAsB;IACvD,MAAM,MAAM,GAAoB;QAC9B,WAAW,EAAE,KAAK;QAClB,uBAAuB,EAAE,IAAI;QAC7B,mBAAmB,EAAE,CAAC;QACtB,WAAW,EAAE,KAAK;QAClB,kBAAkB,EAAE,MAAM;QAC1B,UAAU,EAAE,KAAK;KAClB,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,SAAS,EAAE;YACzC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,uBAAuB,KAAK,SAAS,EAAE;YACrD,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAA;SAC9D;QAED,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,QAAQ,EAAE;YAChD,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAA;SACtD;QAED,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,QAAQ,EAAE;YACxC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,kBAAkB,KAAK,QAAQ,EAAE;YAC/C,MAAM,CAAC,kBAAkB,GAAG,IAAI,CAAC,kBAAkB,CAAA;SACpD;QAED,IAAI,OAAO,IAAI,CAAC,UAAU,KAAK,SAAS,EAAE;YACxC,MAAM,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU,CAAA;SACpC;KACF;IACD,MAAM,0BAA0B,GAC9B,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,CAAA;IAE9C,IACE,0BAA0B;QAC1B,CAAC,KAAK,CAAC,MAAM,CAAC,0BAA0B,CAAC,CAAC;QAC1C,QAAQ,CAAC,MAAM,CAAC,0BAA0B,CAAC,CAAC,EAC5C;QACA,MAAM,CAAC,kBAAkB,GAAG,MAAM,CAAC,0BAA0B,CAAC,GAAG,EAAE,GAAG,IAAI,CAAA;KAC3E;IACD,IAAI,CAAC,KAAK,CAAC,kBAAkB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,mBAAmB,EAAE,CAAC,CAAA;IACjE,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IACzD,IAAI,CAAC,KAAK,CACR,gDAAgD,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,EAAE,CAC/F,CAAA;IACD,IAAI,CAAC,KAAK,CAAC,kCAAkC,MAAM,CAAC,kBAAkB,EAAE,CAAC,CAAA;IACzE,IAAI,CAAC,KAAK,CAAC,gBAAgB,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;IAE/C,OAAO,MAAM,CAAA;AACf,CAAC;AAvDD,gDAuDC"}

View File

@@ -1,6 +1,6 @@
{
"name": "@actions/cache",
"version": "3.3.0",
"version": "4.0.0",
"preview": true,
"description": "Actions cache lib",
"keywords": [
@@ -45,10 +45,12 @@
"@azure/abort-controller": "^1.1.0",
"@azure/ms-rest-js": "^2.6.0",
"@azure/storage-blob": "^12.13.0",
"semver": "^6.3.1"
"@protobuf-ts/plugin": "^2.9.4",
"semver": "^6.3.1",
"twirp-ts": "^2.5.0"
},
"devDependencies": {
"@types/semver": "^6.0.0",
"typescript": "^5.2.2"
}
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "@eslint/js",
"version": "9.15.0",
"version": "9.16.0",
"description": "ESLint JavaScript language implementation",
"main": "./src/index.js",
"types": "./types/index.d.ts",

View File

@@ -1,4 +1,3 @@
/// <reference types="node" />
import type { Fetch } from "./Fetch.js";
/**
* Octokit-specific request options which are ignored for the actual request, but can be used by Octokit or plugins to manipulate how the request is sent or how a response is handled

View File

@@ -1 +1 @@
export declare const VERSION = "13.6.1";
export declare const VERSION = "13.6.2";

View File

@@ -1,6 +1,6 @@
{
"name": "@octokit/types",
"version": "13.6.1",
"version": "13.6.2",
"publishConfig": {
"access": "public",
"provenance": true
@@ -21,18 +21,13 @@
"license": "MIT",
"devDependencies": {
"@octokit/tsconfig": "^4.0.0",
"@types/node": ">= 8",
"github-openapi-graphql-query": "^4.0.0",
"handlebars": "^4.7.6",
"json-schema-to-typescript": "^15.0.0",
"lodash.set": "^4.3.2",
"npm-run-all2": "^6.0.0",
"pascal-case": "^4.0.0",
"npm-run-all2": "^7.0.0",
"prettier": "^3.0.0",
"semantic-release": "^24.0.0",
"semantic-release-plugin-update-version-in-files": "^1.0.0",
"sort-keys": "^5.0.0",
"string-to-jsdoc-comment": "^1.0.0",
"typedoc": "^0.26.0",
"typescript": "^5.0.0"
},

4
node_modules/@pkgr/core/lib/constants.d.ts generated vendored Normal file
View File

@@ -0,0 +1,4 @@
/// <reference types="node" />
export declare const CWD: string;
export declare const cjsRequire: NodeRequire;
export declare const EXTENSIONS: string[];

5
node_modules/@pkgr/core/lib/constants.js generated vendored Normal file
View File

@@ -0,0 +1,5 @@
import { createRequire } from 'node:module';
export const CWD = process.cwd();
export const cjsRequire = typeof require === 'undefined' ? createRequire(import.meta.url) : require;
export const EXTENSIONS = ['.ts', '.tsx', ...Object.keys(cjsRequire.extensions)];
//# sourceMappingURL=constants.js.map

1
node_modules/@pkgr/core/lib/constants.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"constants.js","sourceRoot":"","sources":["../src/constants.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,aAAa,CAAA;AAE3C,MAAM,CAAC,MAAM,GAAG,GAAG,OAAO,CAAC,GAAG,EAAE,CAAA;AAEhC,MAAM,CAAC,MAAM,UAAU,GACrB,OAAO,OAAO,KAAK,WAAW,CAAC,CAAC,CAAC,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAA;AAG3E,MAAM,CAAC,MAAM,UAAU,GAAG,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC,CAAA"}

5
node_modules/@pkgr/core/lib/helpers.d.ts generated vendored Normal file
View File

@@ -0,0 +1,5 @@
export declare const tryPkg: (pkg: string) => string | undefined;
export declare const isPkgAvailable: (pkg: string) => boolean;
export declare const tryFile: (filePath?: string[] | string, includeDir?: boolean) => string;
export declare const tryExtensions: (filepath: string, extensions?: string[]) => string;
export declare const findUp: (searchEntry: string, searchFileOrIncludeDir?: boolean | string, includeDir?: boolean) => string;

49
node_modules/@pkgr/core/lib/helpers.js generated vendored Normal file
View File

@@ -0,0 +1,49 @@
import fs from 'node:fs';
import path from 'node:path';
import { CWD, EXTENSIONS, cjsRequire } from './constants.js';
export const tryPkg = (pkg) => {
try {
return cjsRequire.resolve(pkg);
}
catch (_a) { }
};
export const isPkgAvailable = (pkg) => !!tryPkg(pkg);
export const tryFile = (filePath, includeDir = false) => {
if (typeof filePath === 'string') {
return fs.existsSync(filePath) &&
(includeDir || fs.statSync(filePath).isFile())
? filePath
: '';
}
for (const file of filePath !== null && filePath !== void 0 ? filePath : []) {
if (tryFile(file, includeDir)) {
return file;
}
}
return '';
};
export const tryExtensions = (filepath, extensions = EXTENSIONS) => {
const ext = [...extensions, ''].find(ext => tryFile(filepath + ext));
return ext == null ? '' : filepath + ext;
};
export const findUp = (searchEntry, searchFileOrIncludeDir, includeDir) => {
console.assert(path.isAbsolute(searchEntry));
if (!tryFile(searchEntry, true) ||
(searchEntry !== CWD && !searchEntry.startsWith(CWD + path.sep))) {
return '';
}
searchEntry = path.resolve(fs.statSync(searchEntry).isDirectory()
? searchEntry
: path.resolve(searchEntry, '..'));
const isSearchFile = typeof searchFileOrIncludeDir === 'string';
const searchFile = isSearchFile ? searchFileOrIncludeDir : 'package.json';
do {
const searched = tryFile(path.resolve(searchEntry, searchFile), isSearchFile && includeDir);
if (searched) {
return searched;
}
searchEntry = path.resolve(searchEntry, '..');
} while (searchEntry === CWD || searchEntry.startsWith(CWD + path.sep));
return '';
};
//# sourceMappingURL=helpers.js.map

1
node_modules/@pkgr/core/lib/helpers.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"helpers.js","sourceRoot":"","sources":["../src/helpers.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,SAAS,CAAA;AACxB,OAAO,IAAI,MAAM,WAAW,CAAA;AAE5B,OAAO,EAAE,GAAG,EAAE,UAAU,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAA;AAE5D,MAAM,CAAC,MAAM,MAAM,GAAG,CAAC,GAAW,EAAE,EAAE;IACpC,IAAI,CAAC;QACH,OAAO,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,CAAA;IAChC,CAAC;IAAC,WAAM,CAAC,CAAA,CAAC;AACZ,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,cAAc,GAAG,CAAC,GAAW,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;AAE5D,MAAM,CAAC,MAAM,OAAO,GAAG,CAAC,QAA4B,EAAE,UAAU,GAAG,KAAK,EAAE,EAAE;IAC1E,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE,CAAC;QACjC,OAAO,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC;YAC5B,CAAC,UAAU,IAAI,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,MAAM,EAAE,CAAC;YAC9C,CAAC,CAAC,QAAQ;YACV,CAAC,CAAC,EAAE,CAAA;IACR,CAAC;IAED,KAAK,MAAM,IAAI,IAAI,QAAQ,aAAR,QAAQ,cAAR,QAAQ,GAAI,EAAE,EAAE,CAAC;QAClC,IAAI,OAAO,CAAC,IAAI,EAAE,UAAU,CAAC,EAAE,CAAC;YAC9B,OAAO,IAAI,CAAA;QACb,CAAC;IACH,CAAC;IAED,OAAO,EAAE,CAAA;AACX,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,QAAgB,EAAE,UAAU,GAAG,UAAU,EAAE,EAAE;IACzE,MAAM,GAAG,GAAG,CAAC,GAAG,UAAU,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,OAAO,CAAC,QAAQ,GAAG,GAAG,CAAC,CAAC,CAAA;IACpE,OAAO,GAAG,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,QAAQ,GAAG,GAAG,CAAA;AAC1C,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,MAAM,GAAG,CACpB,WAAmB,EACnB,sBAAyC,EACzC,UAAoB,EACpB,EAAE;IACF,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,WAAW,CAAC,CAAC,CAAA;IAE5C,IACE,CAAC,OAAO,CAAC,WAAW,EAAE,IAAI,CAAC;QAC3B,CAAC,WAAW,KAAK,GAAG,IAAI,CAAC,WAAW,CAAC,UAAU,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAChE,CAAC;QACD,OAAO,EAAE,CAAA;IACX,CAAC;IAED,WAAW,GAAG,IAAI,CAAC,OAAO,CACxB,EAAE,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,WAAW,EAAE;QACpC,CAAC,CAAC,WAAW;QACb,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,EAAE,IAAI,CAAC,CACpC,CAAA;IAED,MAAM,YAAY,GAAG,OAAO,sBAAsB,KAAK,QAAQ,CAAA;IAE/D,MAAM,UAAU,GAAG,YAAY,CAAC,CAAC,CAAC,sBAAsB,CAAC,CAAC,CAAC,cAAc,CAAA;IAEzE,GAAG,CAAC;QACF,MAAM,QAAQ,GAAG,OAAO,CACtB,IAAI,CAAC,OAAO,CAAC,WAAW,EAAE,UAAU,CAAC,EACrC,YAAY,IAAI,UAAU,CAC3B,CAAA;QACD,IAAI,QAAQ,EAAE,CAAC;YACb,OAAO,QAAQ,CAAA;QACjB,CAAC;QACD,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,WAAW,EAAE,IAAI,CAAC,CAAA;IAC/C,CAAC,QAAQ,WAAW,KAAK,GAAG,IAAI,WAAW,CAAC,UAAU,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,EAAC;IAEvE,OAAO,EAAE,CAAA;AACX,CAAC,CAAA"}

64
node_modules/@pkgr/core/lib/index.cjs generated vendored Normal file
View File

@@ -0,0 +1,64 @@
'use strict';
var node_module = require('node:module');
var fs = require('node:fs');
var path = require('node:path');
const import_meta = {};
const CWD = process.cwd();
const cjsRequire = typeof require === "undefined" ? node_module.createRequire(import_meta.url) : require;
const EXTENSIONS = [".ts", ".tsx", ...Object.keys(cjsRequire.extensions)];
const tryPkg = (pkg) => {
try {
return cjsRequire.resolve(pkg);
} catch (e) {
}
};
const isPkgAvailable = (pkg) => !!tryPkg(pkg);
const tryFile = (filePath, includeDir = false) => {
if (typeof filePath === "string") {
return fs.existsSync(filePath) && (includeDir || fs.statSync(filePath).isFile()) ? filePath : "";
}
for (const file of filePath != null ? filePath : []) {
if (tryFile(file, includeDir)) {
return file;
}
}
return "";
};
const tryExtensions = (filepath, extensions = EXTENSIONS) => {
const ext = [...extensions, ""].find((ext2) => tryFile(filepath + ext2));
return ext == null ? "" : filepath + ext;
};
const findUp = (searchEntry, searchFileOrIncludeDir, includeDir) => {
console.assert(path.isAbsolute(searchEntry));
if (!tryFile(searchEntry, true) || searchEntry !== CWD && !searchEntry.startsWith(CWD + path.sep)) {
return "";
}
searchEntry = path.resolve(
fs.statSync(searchEntry).isDirectory() ? searchEntry : path.resolve(searchEntry, "..")
);
const isSearchFile = typeof searchFileOrIncludeDir === "string";
const searchFile = isSearchFile ? searchFileOrIncludeDir : "package.json";
do {
const searched = tryFile(
path.resolve(searchEntry, searchFile),
isSearchFile && includeDir
);
if (searched) {
return searched;
}
searchEntry = path.resolve(searchEntry, "..");
} while (searchEntry === CWD || searchEntry.startsWith(CWD + path.sep));
return "";
};
exports.CWD = CWD;
exports.EXTENSIONS = EXTENSIONS;
exports.cjsRequire = cjsRequire;
exports.findUp = findUp;
exports.isPkgAvailable = isPkgAvailable;
exports.tryExtensions = tryExtensions;
exports.tryFile = tryFile;
exports.tryPkg = tryPkg;

View File

@@ -1,4 +1,2 @@
export * from './browser.js';
export * from './constants.js';
export * from './helpers.js';
export * from './monorepo.js';

View File

@@ -1,5 +1,3 @@
export * from './browser.js';
export * from './constants.js';
export * from './helpers.js';
export * from './monorepo.js';
//# sourceMappingURL=index.js.map

1
node_modules/@pkgr/core/lib/index.js.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,gBAAgB,CAAA;AAC9B,cAAc,cAAc,CAAA"}

View File

@@ -1,10 +1,10 @@
{
"name": "@pkgr/utils",
"version": "2.4.2",
"name": "@pkgr/core",
"version": "0.1.1",
"type": "module",
"description": "Shared utils for `@pkgr` packages or any package else",
"description": "Shared core module for `@pkgr` packages or any package else",
"repository": "git+https://github.com/un-ts/pkgr.git",
"homepage": "https://github.com/un-ts/pkgr/blob/master/packages/utils",
"homepage": "https://github.com/un-ts/pkgr/blob/master/packages/core",
"author": "JounQin (https://www.1stG.me) <admin@1stg.me>",
"funding": "https://opencollective.com/unts",
"license": "MIT",
@@ -20,17 +20,8 @@
},
"types": "./lib/index.d.ts",
"files": [
"lib",
"openChrome.applescript"
"lib"
],
"dependencies": {
"cross-spawn": "^7.0.3",
"fast-glob": "^3.3.0",
"is-glob": "^4.0.3",
"open": "^9.1.0",
"picocolors": "^1.0.0",
"tslib": "^2.6.0"
},
"publishConfig": {
"access": "public"
},

View File

@@ -1 +0,0 @@
export declare function openBrowser(url: string): Promise<boolean>;

View File

@@ -1,122 +0,0 @@
import { __awaiter } from "tslib";
import { execSync } from 'node:child_process';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import spawn from 'cross-spawn';
import picocolors from 'picocolors';
const OSX_CHROME = 'google chrome';
var Action;
(function (Action) {
Action[Action["NONE"] = 0] = "NONE";
Action[Action["BROWSER"] = 1] = "BROWSER";
Action[Action["SCRIPT"] = 2] = "SCRIPT";
})(Action || (Action = {}));
function getBrowserEnv() {
const value = process.env.BROWSER;
const args = process.env.BROWSER_ARGS
? process.env.BROWSER_ARGS.split(' ')
: [];
let action;
if (!value) {
action = Action.BROWSER;
}
else if (value.toLowerCase().endsWith('.js')) {
action = Action.SCRIPT;
}
else if (value.toLowerCase() === 'none') {
action = Action.NONE;
}
else {
action = Action.BROWSER;
}
return { action, value, args };
}
function executeNodeScript(scriptPath, url) {
const extraArgs = process.argv.slice(2);
const child = spawn(process.execPath, [scriptPath, ...extraArgs, url], {
stdio: 'inherit',
});
child.on('close', code => {
if (code !== 0) {
console.log();
console.log(picocolors.red('The script specified as BROWSER environment variable failed.'));
console.log(`${picocolors.cyan(scriptPath)} exited with code ${code}`);
console.log();
}
});
return true;
}
function startBrowserProcess(browser, url, args) {
return __awaiter(this, void 0, void 0, function* () {
const shouldTryOpenChromiumWithAppleScript = process.platform === 'darwin' &&
(typeof browser !== 'string' || browser === OSX_CHROME);
if (shouldTryOpenChromiumWithAppleScript) {
const supportedChromiumBrowsers = [
'Google Chrome Canary',
'Google Chrome',
'Microsoft Edge',
'Brave Browser',
'Vivaldi',
'Chromium',
];
const _dirname = typeof __dirname === 'undefined'
? path.dirname(fileURLToPath(import.meta.url))
: __dirname;
for (const chromiumBrowser of supportedChromiumBrowsers) {
try {
execSync('ps cax | grep "' + chromiumBrowser + '"');
execSync('osascript ../openChrome.applescript "' +
encodeURI(url) +
'" "' +
chromiumBrowser +
'"', {
cwd: _dirname,
stdio: 'ignore',
});
return true;
}
catch (_a) {
}
}
}
if (process.platform === 'darwin' && browser === 'open') {
browser = undefined;
}
try {
const open = (yield import('open')).default;
open(url, {
app: browser
? {
name: browser,
arguments: args,
}
: undefined,
wait: false,
}).catch(() => { });
return true;
}
catch (_b) {
return false;
}
});
}
export function openBrowser(url) {
return __awaiter(this, void 0, void 0, function* () {
const { action, value, args } = getBrowserEnv();
switch (action) {
case Action.NONE: {
return false;
}
case Action.SCRIPT: {
return executeNodeScript(value, url);
}
case Action.BROWSER: {
return startBrowserProcess(value, url, args);
}
default: {
throw new Error('Not implemented.');
}
}
});
}
//# sourceMappingURL=browser.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"browser.js","sourceRoot":"","sources":["../src/browser.ts"],"names":[],"mappings":";AAEA,OAAO,EAAE,QAAQ,EAAE,MAAM,oBAAoB,CAAA;AAC7C,OAAO,IAAI,MAAM,WAAW,CAAA;AAC5B,OAAO,EAAE,aAAa,EAAE,MAAM,UAAU,CAAA;AAExC,OAAO,KAAK,MAAM,aAAa,CAAA;AAC/B,OAAO,UAAU,MAAM,YAAY,CAAA;AAGnC,MAAM,UAAU,GAAG,eAAe,CAAA;AAElC,IAAK,MAIJ;AAJD,WAAK,MAAM;IACT,mCAAI,CAAA;IACJ,yCAAO,CAAA;IACP,uCAAM,CAAA;AACR,CAAC,EAJI,MAAM,KAAN,MAAM,QAIV;AAED,SAAS,aAAa;IAIpB,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,OAAO,CAAA;IACjC,MAAM,IAAI,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY;QACnC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,KAAK,CAAC,GAAG,CAAC;QACrC,CAAC,CAAC,EAAE,CAAA;IACN,IAAI,MAAc,CAAA;IAClB,IAAI,CAAC,KAAK,EAAE;QAEV,MAAM,GAAG,MAAM,CAAC,OAAO,CAAA;KACxB;SAAM,IAAI,KAAK,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;QAC9C,MAAM,GAAG,MAAM,CAAC,MAAM,CAAA;KACvB;SAAM,IAAI,KAAK,CAAC,WAAW,EAAE,KAAK,MAAM,EAAE;QACzC,MAAM,GAAG,MAAM,CAAC,IAAI,CAAA;KACrB;SAAM;QACL,MAAM,GAAG,MAAM,CAAC,OAAO,CAAA;KACxB;IACD,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,CAAA;AAChC,CAAC;AAED,SAAS,iBAAiB,CAAC,UAAkB,EAAE,GAAW;IACxD,MAAM,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;IACvC,MAAM,KAAK,GAAG,KAAK,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,UAAU,EAAE,GAAG,SAAS,EAAE,GAAG,CAAC,EAAE;QACrE,KAAK,EAAE,SAAS;KACjB,CAAC,CAAA;IACF,KAAK,CAAC,EAAE,CAAC,OAAO,EAAE,IAAI,CAAC,EAAE;QACvB,IAAI,IAAI,KAAK,CAAC,EAAE;YACd,OAAO,CAAC,GAAG,EAAE,CAAA;YACb,OAAO,CAAC,GAAG,CACT,UAAU,CAAC,GAAG,CACZ,8DAA8D,CAC/D,CACF,CAAA;YACD,OAAO,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,IAAI,CAAC,UAAU,CAAC,qBAAqB,IAAK,EAAE,CAAC,CAAA;YACvE,OAAO,CAAC,GAAG,EAAE,CAAA;SACd;IACH,CAAC,CAAC,CAAA;IACF,OAAO,IAAI,CAAA;AACb,CAAC;AAED,SAAe,mBAAmB,CAChC,OAAsC,EACtC,GAAW,EACX,IAAc;;QAMd,MAAM,oCAAoC,GACxC,OAAO,CAAC,QAAQ,KAAK,QAAQ;YAC7B,CAAC,OAAO,OAAO,KAAK,QAAQ,IAAI,OAAO,KAAK,UAAU,CAAC,CAAA;QAEzD,IAAI,oCAAoC,EAAE;YAExC,MAAM,yBAAyB,GAAG;gBAChC,sBAAsB;gBACtB,eAAe;gBACf,gBAAgB;gBAChB,eAAe;gBACf,SAAS;gBACT,UAAU;aACX,CAAA;YAED,MAAM,QAAQ,GACZ,OAAO,SAAS,KAAK,WAAW;gBAC9B,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;gBAC9C,CAAC,CAAC,SAAS,CAAA;YAEf,KAAK,MAAM,eAAe,IAAI,yBAAyB,EAAE;gBACvD,IAAI;oBAGF,QAAQ,CAAC,iBAAiB,GAAG,eAAe,GAAG,GAAG,CAAC,CAAA;oBACnD,QAAQ,CACN,uCAAuC;wBACrC,SAAS,CAAC,GAAG,CAAC;wBACd,KAAK;wBACL,eAAe;wBACf,GAAG,EACL;wBACE,GAAG,EAAE,QAAQ;wBACb,KAAK,EAAE,QAAQ;qBAChB,CACF,CAAA;oBACD,OAAO,IAAI,CAAA;iBACZ;gBAAC,WAAM;iBAEP;aACF;SACF;QAMD,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,IAAI,OAAO,KAAK,MAAM,EAAE;YACvD,OAAO,GAAG,SAAS,CAAA;SACpB;QAID,IAAI;YAEF,MAAM,IAAI,GAAG,CAAC,MAAM,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAA;YAC3C,IAAI,CAAC,GAAG,EAAE;gBACR,GAAG,EAAE,OAAO;oBACV,CAAC,CAAC;wBACE,IAAI,EAAE,OAAO;wBACb,SAAS,EAAE,IAAI;qBAChB;oBACH,CAAC,CAAC,SAAS;gBACb,IAAI,EAAE,KAAK;aACZ,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,GAAE,CAAC,CAAC,CAAA;YAClB,OAAO,IAAI,CAAA;SACZ;QAAC,WAAM;YACN,OAAO,KAAK,CAAA;SACb;IACH,CAAC;CAAA;AAMD,MAAM,UAAgB,WAAW,CAAC,GAAW;;QAC3C,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,GAAG,aAAa,EAAE,CAAA;QAC/C,QAAQ,MAAM,EAAE;YACd,KAAK,MAAM,CAAC,IAAI,CAAC,CAAC;gBAEhB,OAAO,KAAK,CAAA;aACb;YACD,KAAK,MAAM,CAAC,MAAM,CAAC,CAAC;gBAClB,OAAO,iBAAiB,CAAC,KAAM,EAAE,GAAG,CAAC,CAAA;aACtC;YACD,KAAK,MAAM,CAAC,OAAO,CAAC,CAAC;gBACnB,OAAO,mBAAmB,CAAC,KAAK,EAAE,GAAG,EAAE,IAAI,CAAC,CAAA;aAC7C;YACD,OAAO,CAAC,CAAC;gBACP,MAAM,IAAI,KAAK,CAAC,kBAAkB,CAAC,CAAA;aACpC;SACF;IACH,CAAC;CAAA"}

View File

@@ -1,20 +0,0 @@
/// <reference types="node" />
export declare const DEV: "development";
export declare const PROD: "production";
export declare const NODE_ENV: string;
export declare const __DEV__: boolean;
export declare const __PROD__: boolean;
export declare const NODE_MODULES_REG: RegExp;
export declare const CWD: string;
export declare const cjsRequire: NodeRequire;
export declare const EXTENSIONS: string[];
export declare const SCRIPT_RUNNERS: {
readonly npm: "npx";
readonly pnpm: "pnpm";
readonly yarn: "yarn";
};
export declare const SCRIPT_EXECUTORS: {
readonly npm: "npx";
readonly pnpm: "pnpx";
readonly yarn: "yarn dlx";
};

View File

@@ -1,22 +0,0 @@
var _a;
import { createRequire } from 'node:module';
export const DEV = 'development';
export const PROD = 'production';
export const NODE_ENV = (_a = process.env.NODE_ENV) !== null && _a !== void 0 ? _a : DEV;
export const __DEV__ = NODE_ENV === DEV;
export const __PROD__ = NODE_ENV === PROD;
export const NODE_MODULES_REG = /[/\\]node_modules[/\\]/;
export const CWD = process.cwd();
export const cjsRequire = typeof require === 'undefined' ? createRequire(import.meta.url) : require;
export const EXTENSIONS = ['.ts', '.tsx', ...Object.keys(cjsRequire.extensions)];
export const SCRIPT_RUNNERS = {
npm: 'npx',
pnpm: 'pnpm',
yarn: 'yarn',
};
export const SCRIPT_EXECUTORS = {
npm: 'npx',
pnpm: 'pnpx',
yarn: 'yarn dlx',
};
//# sourceMappingURL=constants.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"constants.js","sourceRoot":"","sources":["../src/constants.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,aAAa,CAAA;AAE3C,MAAM,CAAC,MAAM,GAAG,GAAG,aAAsB,CAAA;AACzC,MAAM,CAAC,MAAM,IAAI,GAAG,YAAqB,CAAA;AAEzC,MAAM,CAAC,MAAM,QAAQ,GAAG,MAAA,OAAO,CAAC,GAAG,CAAC,QAAQ,mCAAI,GAAG,CAAA;AAEnD,MAAM,CAAC,MAAM,OAAO,GAAG,QAAQ,KAAK,GAAG,CAAA;AACvC,MAAM,CAAC,MAAM,QAAQ,GAAG,QAAQ,KAAK,IAAI,CAAA;AAEzC,MAAM,CAAC,MAAM,gBAAgB,GAAG,wBAAwB,CAAA;AAExD,MAAM,CAAC,MAAM,GAAG,GAAG,OAAO,CAAC,GAAG,EAAE,CAAA;AAEhC,MAAM,CAAC,MAAM,UAAU,GACrB,OAAO,OAAO,KAAK,WAAW,CAAC,CAAC,CAAC,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAA;AAG3E,MAAM,CAAC,MAAM,UAAU,GAAG,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC,CAAA;AAEhF,MAAM,CAAC,MAAM,cAAc,GAAG;IAC5B,GAAG,EAAE,KAAK;IACV,IAAI,EAAE,MAAM;IACZ,IAAI,EAAE,MAAM;CACJ,CAAA;AAEV,MAAM,CAAC,MAAM,gBAAgB,GAAG;IAC9B,GAAG,EAAE,KAAK;IACV,IAAI,EAAE,MAAM;IACZ,IAAI,EAAE,UAAU;CACR,CAAA"}

View File

@@ -1,22 +0,0 @@
export declare const tryPkg: (pkg: string) => string | undefined;
export declare const tryRequirePkg: <T>(pkg: string) => T | undefined;
export declare const isPkgAvailable: (pkg: string) => boolean;
export declare const isTsAvailable: boolean;
export declare const isAngularAvailable: boolean;
export declare const isMdxAvailable: boolean;
export declare const isReactAvailable: boolean;
export declare const isSvelteAvailable: boolean;
export declare const isVueAvailable: boolean;
export declare const tryFile: (filePath?: string[] | string, includeDir?: boolean) => string;
export declare const tryExtensions: (filepath: string, extensions?: string[]) => string;
export declare const tryGlob: (paths: string[], options?: string | {
absolute?: boolean;
baseDir?: string;
ignore?: [string];
}) => string[];
export declare const identify: <T>(_: T) => _ is Exclude<T, "" | (T extends boolean ? false : boolean) | null | undefined>;
export declare const findUp: (searchEntry: string, searchFile?: string) => string;
export declare const arrayify: <T, R = T extends (infer S)[] ? NonNullable<S> : NonNullable<T>>(...args: (R | R[])[]) => R[];
export declare const getPackageManager: () => "pnpm" | "yarn" | "npm" | undefined;
export declare const getScriptRunner: () => "npx" | "pnpm" | "yarn" | undefined;
export declare const getScriptExecutor: () => "npx" | "pnpx" | "yarn dlx" | undefined;

View File

@@ -1,111 +0,0 @@
import fs from 'node:fs';
import path from 'node:path';
import isGlob from 'is-glob';
import { CWD, EXTENSIONS, cjsRequire, SCRIPT_RUNNERS, SCRIPT_EXECUTORS, } from './constants.js';
export const tryPkg = (pkg) => {
try {
return cjsRequire.resolve(pkg);
}
catch (_a) { }
};
export const tryRequirePkg = (pkg) => {
try {
return cjsRequire(pkg);
}
catch (_a) { }
};
export const isPkgAvailable = (pkg) => !!tryPkg(pkg);
export const isTsAvailable = isPkgAvailable('typescript');
export const isAngularAvailable = isPkgAvailable('@angular/core/package.json');
export const isMdxAvailable = isPkgAvailable('@mdx-js/mdx/package.json') ||
isPkgAvailable('@mdx-js/react/package.json');
export const isReactAvailable = isPkgAvailable('react');
export const isSvelteAvailable = isPkgAvailable('svelte');
export const isVueAvailable = isPkgAvailable('vue');
export const tryFile = (filePath, includeDir = false) => {
if (typeof filePath === 'string') {
return fs.existsSync(filePath) &&
(includeDir || fs.statSync(filePath).isFile())
? filePath
: '';
}
for (const file of filePath !== null && filePath !== void 0 ? filePath : []) {
if (tryFile(file, includeDir)) {
return file;
}
}
return '';
};
export const tryExtensions = (filepath, extensions = EXTENSIONS) => {
const ext = [...extensions, ''].find(ext => tryFile(filepath + ext));
return ext == null ? '' : filepath + ext;
};
export const tryGlob = (paths, options = {}) => {
const { absolute = true, baseDir = CWD, ignore = ['**/node_modules/**'], } = typeof options === 'string' ? { baseDir: options } : options;
return paths.reduce((acc, pkg) => [
...acc,
...(isGlob(pkg)
? tryRequirePkg('fast-glob')
.sync(pkg, {
cwd: baseDir,
ignore,
onlyFiles: false,
})
.map(file => (absolute ? path.resolve(baseDir, file) : file))
: [tryFile(path.resolve(baseDir, pkg), true)]),
].filter(Boolean), []);
};
export const identify = (_) => !!_;
export const findUp = (searchEntry, searchFile = 'package.json') => {
console.assert(path.isAbsolute(searchEntry));
if (!tryFile(searchEntry, true) ||
(searchEntry !== CWD && !searchEntry.startsWith(CWD + path.sep))) {
return '';
}
searchEntry = path.resolve(fs.statSync(searchEntry).isDirectory()
? searchEntry
: path.resolve(searchEntry, '..'));
do {
const searched = tryFile(path.resolve(searchEntry, searchFile));
if (searched) {
return searched;
}
searchEntry = path.resolve(searchEntry, '..');
} while (searchEntry === CWD || searchEntry.startsWith(CWD + path.sep));
return '';
};
export const arrayify = (...args) => args.reduce((arr, curr) => {
arr.push(...(Array.isArray(curr) ? curr : curr == null ? [] : [curr]));
return arr;
}, []);
export const getPackageManager = () => {
const execPath = process.env.npm_execpath;
if (!execPath) {
return;
}
if (/\byarn\b/.test(execPath)) {
return 'yarn';
}
if (/\bpnpm\b/.test(execPath)) {
return 'pnpm';
}
if (/\bnpm\b/.test(execPath)) {
return 'npm';
}
console.warn('unknown package manager:', execPath);
};
export const getScriptRunner = () => {
const pm = getPackageManager();
if (!pm) {
return;
}
return SCRIPT_RUNNERS[pm];
};
export const getScriptExecutor = () => {
const pm = getPackageManager();
if (!pm) {
return;
}
return SCRIPT_EXECUTORS[pm];
};
//# sourceMappingURL=helpers.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"helpers.js","sourceRoot":"","sources":["../src/helpers.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,SAAS,CAAA;AACxB,OAAO,IAAI,MAAM,WAAW,CAAA;AAE5B,OAAO,MAAM,MAAM,SAAS,CAAA;AAE5B,OAAO,EACL,GAAG,EACH,UAAU,EACV,UAAU,EACV,cAAc,EACd,gBAAgB,GACjB,MAAM,gBAAgB,CAAA;AAEvB,MAAM,CAAC,MAAM,MAAM,GAAG,CAAC,GAAW,EAAE,EAAE;IACpC,IAAI;QACF,OAAO,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,CAAA;KAC/B;IAAC,WAAM,GAAE;AACZ,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,aAAa,GAAG,CAAI,GAAW,EAAiB,EAAE;IAC7D,IAAI;QAEF,OAAO,UAAU,CAAC,GAAG,CAAC,CAAA;KACvB;IAAC,WAAM,GAAE;AACZ,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,cAAc,GAAG,CAAC,GAAW,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;AAE5D,MAAM,CAAC,MAAM,aAAa,GAAG,cAAc,CAAC,YAAY,CAAC,CAAA;AAEzD,MAAM,CAAC,MAAM,kBAAkB,GAAG,cAAc,CAAC,4BAA4B,CAAC,CAAA;AAE9E,MAAM,CAAC,MAAM,cAAc,GACzB,cAAc,CAAC,0BAA0B,CAAC;IAC1C,cAAc,CAAC,4BAA4B,CAAC,CAAA;AAE9C,MAAM,CAAC,MAAM,gBAAgB,GAAG,cAAc,CAAC,OAAO,CAAC,CAAA;AAEvD,MAAM,CAAC,MAAM,iBAAiB,GAAG,cAAc,CAAC,QAAQ,CAAC,CAAA;AAEzD,MAAM,CAAC,MAAM,cAAc,GAAG,cAAc,CAAC,KAAK,CAAC,CAAA;AAEnD,MAAM,CAAC,MAAM,OAAO,GAAG,CAAC,QAA4B,EAAE,UAAU,GAAG,KAAK,EAAE,EAAE;IAC1E,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;QAChC,OAAO,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC;YAC5B,CAAC,UAAU,IAAI,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,MAAM,EAAE,CAAC;YAC9C,CAAC,CAAC,QAAQ;YACV,CAAC,CAAC,EAAE,CAAA;KACP;IAED,KAAK,MAAM,IAAI,IAAI,QAAQ,aAAR,QAAQ,cAAR,QAAQ,GAAI,EAAE,EAAE;QACjC,IAAI,OAAO,CAAC,IAAI,EAAE,UAAU,CAAC,EAAE;YAC7B,OAAO,IAAI,CAAA;SACZ;KACF;IAED,OAAO,EAAE,CAAA;AACX,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,QAAgB,EAAE,UAAU,GAAG,UAAU,EAAE,EAAE;IACzE,MAAM,GAAG,GAAG,CAAC,GAAG,UAAU,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,OAAO,CAAC,QAAQ,GAAG,GAAG,CAAC,CAAC,CAAA;IACpE,OAAO,GAAG,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,QAAQ,GAAG,GAAG,CAAA;AAC1C,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,OAAO,GAAG,CACrB,KAAe,EACf,UAMQ,EAAE,EACV,EAAE;IACF,MAAM,EACJ,QAAQ,GAAG,IAAI,EACf,OAAO,GAAG,GAAG,EACb,MAAM,GAAG,CAAC,oBAAoB,CAAC,GAChC,GAAG,OAAO,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,OAAO,CAAA;IAChE,OAAO,KAAK,CAAC,MAAM,CACjB,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACX;QACE,GAAG,GAAG;QACN,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC;YACb,CAAC,CAAC,aAAa,CAA6B,WAAW,CAAE;iBACpD,IAAI,CAAC,GAAG,EAAE;gBACT,GAAG,EAAE,OAAO;gBACZ,MAAM;gBACN,SAAS,EAAE,KAAK;aACjB,CAAC;iBAED,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;YACjE,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,GAAG,CAAC,EAAE,IAAI,CAAC,CAAC,CAAC;KACjD,CAAC,MAAM,CAAC,OAAO,CAAC,EACnB,EAAE,CACH,CAAA;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,QAAQ,GAAG,CACtB,CAAI,EAIJ,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;AAER,MAAM,CAAC,MAAM,MAAM,GAAG,CAAC,WAAmB,EAAE,UAAU,GAAG,cAAc,EAAE,EAAE;IACzE,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,WAAW,CAAC,CAAC,CAAA;IAE5C,IACE,CAAC,OAAO,CAAC,WAAW,EAAE,IAAI,CAAC;QAC3B,CAAC,WAAW,KAAK,GAAG,IAAI,CAAC,WAAW,CAAC,UAAU,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAChE;QACA,OAAO,EAAE,CAAA;KACV;IAED,WAAW,GAAG,IAAI,CAAC,OAAO,CACxB,EAAE,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,WAAW,EAAE;QACpC,CAAC,CAAC,WAAW;QACb,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,EAAE,IAAI,CAAC,CACpC,CAAA;IAED,GAAG;QACD,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,EAAE,UAAU,CAAC,CAAC,CAAA;QAC/D,IAAI,QAAQ,EAAE;YACZ,OAAO,QAAQ,CAAA;SAChB;QACD,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,WAAW,EAAE,IAAI,CAAC,CAAA;KAC9C,QAAQ,WAAW,KAAK,GAAG,IAAI,WAAW,CAAC,UAAU,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,EAAC;IAEvE,OAAO,EAAE,CAAA;AACX,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,QAAQ,GAAG,CAItB,GAAG,IAAoB,EACvB,EAAE,CACF,IAAI,CAAC,MAAM,CAAM,CAAC,GAAG,EAAE,IAAI,EAAE,EAAE;IAC7B,GAAG,CAAC,IAAI,CAAC,G
AAG,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;IACtE,OAAO,GAAG,CAAA;AACZ,CAAC,EAAE,EAAE,CAAC,CAAA;AAER,MAAM,CAAC,MAAM,iBAAiB,GAAG,GAAG,EAAE;IACpC,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAA;IAEzC,IAAI,CAAC,QAAQ,EAAE;QACb,OAAM;KACP;IAED,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE;QAC7B,OAAO,MAAM,CAAA;KACd;IAED,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE;QAC7B,OAAO,MAAM,CAAA;KACd;IAED,IAAI,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE;QAC5B,OAAO,KAAK,CAAA;KACb;IAED,OAAO,CAAC,IAAI,CAAC,0BAA0B,EAAE,QAAQ,CAAC,CAAA;AACpD,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,eAAe,GAAG,GAAG,EAAE;IAClC,MAAM,EAAE,GAAG,iBAAiB,EAAE,CAAA;IAE9B,IAAI,CAAC,EAAE,EAAE;QACP,OAAM;KACP;IAED,OAAO,cAAc,CAAC,EAAE,CAAC,CAAA;AAC3B,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,iBAAiB,GAAG,GAAG,EAAE;IACpC,MAAM,EAAE,GAAG,iBAAiB,EAAE,CAAA;IAE9B,IAAI,CAAC,EAAE,EAAE;QACP,OAAM;KACP;IAED,OAAO,gBAAgB,CAAC,EAAE,CAAC,CAAA;AAC7B,CAAC,CAAA"}

View File

@@ -1,306 +0,0 @@
'use strict';
var node_child_process = require('node:child_process');
var path = require('node:path');
var node_url = require('node:url');
var spawn = require('cross-spawn');
var picocolors = require('picocolors');
var node_module = require('node:module');
var fs = require('node:fs');
var isGlob = require('is-glob');
var __async = (__this, __arguments, generator) => {
return new Promise((resolve, reject) => {
var fulfilled = (value) => {
try {
step(generator.next(value));
} catch (e) {
reject(e);
}
};
var rejected = (value) => {
try {
step(generator.throw(value));
} catch (e) {
reject(e);
}
};
var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
step((generator = generator.apply(__this, __arguments)).next());
});
};
const import_meta$1 = {};
const OSX_CHROME = "google chrome";
function getBrowserEnv() {
const value = process.env.BROWSER;
const args = process.env.BROWSER_ARGS ? process.env.BROWSER_ARGS.split(" ") : [];
let action;
if (!value) {
action = 1 /* BROWSER */;
} else if (value.toLowerCase().endsWith(".js")) {
action = 2 /* SCRIPT */;
} else if (value.toLowerCase() === "none") {
action = 0 /* NONE */;
} else {
action = 1 /* BROWSER */;
}
return { action, value, args };
}
function executeNodeScript(scriptPath, url) {
const extraArgs = process.argv.slice(2);
const child = spawn(process.execPath, [scriptPath, ...extraArgs, url], {
stdio: "inherit"
});
child.on("close", (code) => {
if (code !== 0) {
console.log();
console.log(
picocolors.red(
"The script specified as BROWSER environment variable failed."
)
);
console.log(`${picocolors.cyan(scriptPath)} exited with code ${code}`);
console.log();
}
});
return true;
}
function startBrowserProcess(browser, url, args) {
return __async(this, null, function* () {
const shouldTryOpenChromiumWithAppleScript = process.platform === "darwin" && (typeof browser !== "string" || browser === OSX_CHROME);
if (shouldTryOpenChromiumWithAppleScript) {
const supportedChromiumBrowsers = [
"Google Chrome Canary",
"Google Chrome",
"Microsoft Edge",
"Brave Browser",
"Vivaldi",
"Chromium"
];
const _dirname = typeof __dirname === "undefined" ? path.dirname(node_url.fileURLToPath(import_meta$1.url)) : __dirname;
for (const chromiumBrowser of supportedChromiumBrowsers) {
try {
node_child_process.execSync('ps cax | grep "' + chromiumBrowser + '"');
node_child_process.execSync(
'osascript ../openChrome.applescript "' + // lgtm [js/shell-command-constructed-from-input]
encodeURI(url) + '" "' + chromiumBrowser + '"',
{
cwd: _dirname,
stdio: "ignore"
}
);
return true;
} catch (e) {
}
}
}
if (process.platform === "darwin" && browser === "open") {
browser = void 0;
}
try {
const open = (yield import('open')).default;
open(url, {
app: browser ? {
name: browser,
arguments: args
} : void 0,
wait: false
}).catch(() => {
});
return true;
} catch (e) {
return false;
}
});
}
function openBrowser(url) {
return __async(this, null, function* () {
const { action, value, args } = getBrowserEnv();
switch (action) {
case 0 /* NONE */: {
return false;
}
case 2 /* SCRIPT */: {
return executeNodeScript(value, url);
}
case 1 /* BROWSER */: {
return startBrowserProcess(value, url, args);
}
default: {
throw new Error("Not implemented.");
}
}
});
}
const import_meta = {};
var _a$1;
const DEV = "development";
const PROD = "production";
const NODE_ENV = (_a$1 = process.env.NODE_ENV) != null ? _a$1 : DEV;
const __DEV__ = NODE_ENV === DEV;
const __PROD__ = NODE_ENV === PROD;
const NODE_MODULES_REG = /[/\\]node_modules[/\\]/;
const CWD = process.cwd();
const cjsRequire = typeof require === "undefined" ? node_module.createRequire(import_meta.url) : require;
const EXTENSIONS = [".ts", ".tsx", ...Object.keys(cjsRequire.extensions)];
const SCRIPT_RUNNERS = {
npm: "npx",
pnpm: "pnpm",
yarn: "yarn"
};
const SCRIPT_EXECUTORS = {
npm: "npx",
pnpm: "pnpx",
// same as 'pnpm dlx'
yarn: "yarn dlx"
};
const tryPkg = (pkg) => {
try {
return cjsRequire.resolve(pkg);
} catch (e) {
}
};
const tryRequirePkg = (pkg) => {
try {
return cjsRequire(pkg);
} catch (e) {
}
};
const isPkgAvailable = (pkg) => !!tryPkg(pkg);
const isTsAvailable = isPkgAvailable("typescript");
const isAngularAvailable = isPkgAvailable("@angular/core/package.json");
const isMdxAvailable = isPkgAvailable("@mdx-js/mdx/package.json") || isPkgAvailable("@mdx-js/react/package.json");
const isReactAvailable = isPkgAvailable("react");
const isSvelteAvailable = isPkgAvailable("svelte");
const isVueAvailable = isPkgAvailable("vue");
const tryFile = (filePath, includeDir = false) => {
if (typeof filePath === "string") {
return fs.existsSync(filePath) && (includeDir || fs.statSync(filePath).isFile()) ? filePath : "";
}
for (const file of filePath != null ? filePath : []) {
if (tryFile(file, includeDir)) {
return file;
}
}
return "";
};
const tryExtensions = (filepath, extensions = EXTENSIONS) => {
const ext = [...extensions, ""].find((ext2) => tryFile(filepath + ext2));
return ext == null ? "" : filepath + ext;
};
const tryGlob = (paths, options = {}) => {
const {
absolute = true,
baseDir = CWD,
ignore = ["**/node_modules/**"]
} = typeof options === "string" ? { baseDir: options } : options;
return paths.reduce(
(acc, pkg) => [
...acc,
...isGlob(pkg) ? tryRequirePkg("fast-glob").sync(pkg, {
cwd: baseDir,
ignore,
onlyFiles: false
}).map((file) => absolute ? path.resolve(baseDir, file) : file) : [tryFile(path.resolve(baseDir, pkg), true)]
].filter(Boolean),
[]
);
};
const identify = (_) => !!_;
const findUp = (searchEntry, searchFile = "package.json") => {
console.assert(path.isAbsolute(searchEntry));
if (!tryFile(searchEntry, true) || searchEntry !== CWD && !searchEntry.startsWith(CWD + path.sep)) {
return "";
}
searchEntry = path.resolve(
fs.statSync(searchEntry).isDirectory() ? searchEntry : path.resolve(searchEntry, "..")
);
do {
const searched = tryFile(path.resolve(searchEntry, searchFile));
if (searched) {
return searched;
}
searchEntry = path.resolve(searchEntry, "..");
} while (searchEntry === CWD || searchEntry.startsWith(CWD + path.sep));
return "";
};
const arrayify = (...args) => args.reduce((arr, curr) => {
arr.push(...Array.isArray(curr) ? curr : curr == null ? [] : [curr]);
return arr;
}, []);
const getPackageManager = () => {
const execPath = process.env.npm_execpath;
if (!execPath) {
return;
}
if (/\byarn\b/.test(execPath)) {
return "yarn";
}
if (/\bpnpm\b/.test(execPath)) {
return "pnpm";
}
if (/\bnpm\b/.test(execPath)) {
return "npm";
}
console.warn("unknown package manager:", execPath);
};
const getScriptRunner = () => {
const pm = getPackageManager();
if (!pm) {
return;
}
return SCRIPT_RUNNERS[pm];
};
const getScriptExecutor = () => {
const pm = getPackageManager();
if (!pm) {
return;
}
return SCRIPT_EXECUTORS[pm];
};
var _a, _b, _c, _d;
const pkg = (_a = tryRequirePkg(path.resolve("package.json"))) != null ? _a : {};
const lernaConfig = (_b = tryRequirePkg(path.resolve("lerna.json"))) != null ? _b : {};
const pkgsPath = (_d = (_c = lernaConfig.packages) != null ? _c : pkg.workspaces) != null ? _d : [];
const isMonorepo = Array.isArray(pkgsPath) && pkgsPath.length > 0;
const monorepoPkgs = isMonorepo ? tryGlob(
pkgsPath.map(
(pkg2) => pkg2.endsWith("/package.json") ? pkg2 : `${pkg2}/package.json`
)
) : [];
exports.CWD = CWD;
exports.DEV = DEV;
exports.EXTENSIONS = EXTENSIONS;
exports.NODE_ENV = NODE_ENV;
exports.NODE_MODULES_REG = NODE_MODULES_REG;
exports.PROD = PROD;
exports.SCRIPT_EXECUTORS = SCRIPT_EXECUTORS;
exports.SCRIPT_RUNNERS = SCRIPT_RUNNERS;
exports.__DEV__ = __DEV__;
exports.__PROD__ = __PROD__;
exports.arrayify = arrayify;
exports.cjsRequire = cjsRequire;
exports.findUp = findUp;
exports.getPackageManager = getPackageManager;
exports.getScriptExecutor = getScriptExecutor;
exports.getScriptRunner = getScriptRunner;
exports.identify = identify;
exports.isAngularAvailable = isAngularAvailable;
exports.isMdxAvailable = isMdxAvailable;
exports.isMonorepo = isMonorepo;
exports.isPkgAvailable = isPkgAvailable;
exports.isReactAvailable = isReactAvailable;
exports.isSvelteAvailable = isSvelteAvailable;
exports.isTsAvailable = isTsAvailable;
exports.isVueAvailable = isVueAvailable;
exports.monorepoPkgs = monorepoPkgs;
exports.openBrowser = openBrowser;
exports.tryExtensions = tryExtensions;
exports.tryFile = tryFile;
exports.tryGlob = tryGlob;
exports.tryPkg = tryPkg;
exports.tryRequirePkg = tryRequirePkg;

View File

@@ -1 +0,0 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,cAAc,CAAA;AAC5B,cAAc,gBAAgB,CAAA;AAC9B,cAAc,cAAc,CAAA;AAC5B,cAAc,eAAe,CAAA"}

View File

@@ -1,2 +0,0 @@
export declare const isMonorepo: boolean;
export declare const monorepoPkgs: string[];

View File

@@ -1,11 +0,0 @@
var _a, _b, _c, _d;
import path from 'node:path';
import { tryGlob, tryRequirePkg } from './helpers.js';
const pkg = (_a = tryRequirePkg(path.resolve('package.json'))) !== null && _a !== void 0 ? _a : {};
const lernaConfig = (_b = tryRequirePkg(path.resolve('lerna.json'))) !== null && _b !== void 0 ? _b : {};
const pkgsPath = (_d = (_c = lernaConfig.packages) !== null && _c !== void 0 ? _c : pkg.workspaces) !== null && _d !== void 0 ? _d : [];
export const isMonorepo = Array.isArray(pkgsPath) && pkgsPath.length > 0;
export const monorepoPkgs = isMonorepo
? tryGlob(pkgsPath.map(pkg => pkg.endsWith('/package.json') ? pkg : `${pkg}/package.json`))
: [];
//# sourceMappingURL=monorepo.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"monorepo.js","sourceRoot":"","sources":["../src/monorepo.ts"],"names":[],"mappings":";AAAA,OAAO,IAAI,MAAM,WAAW,CAAA;AAE5B,OAAO,EAAE,OAAO,EAAE,aAAa,EAAE,MAAM,cAAc,CAAA;AAErD,MAAM,GAAG,GACP,MAAA,aAAa,CAA4B,IAAI,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,mCAAI,EAAE,CAAA;AAE9E,MAAM,WAAW,GACf,MAAA,aAAa,CAA0B,IAAI,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC,mCAAI,EAAE,CAAA;AAE1E,MAAM,QAAQ,GAAG,MAAA,MAAA,WAAW,CAAC,QAAQ,mCAAI,GAAG,CAAC,UAAU,mCAAI,EAAE,CAAA;AAE7D,MAAM,CAAC,MAAM,UAAU,GAAG,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAA;AAExE,MAAM,CAAC,MAAM,YAAY,GAAG,UAAU;IACpC,CAAC,CAAC,OAAO,CACL,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CACjB,GAAG,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,eAAe,CAC5D,CACF;IACH,CAAC,CAAC,EAAE,CAAA"}

View File

@@ -1,15 +0,0 @@
/******************************************************************************
Copyright (c) Microsoft Corporation.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */

View File

@@ -1,12 +0,0 @@
Copyright (c) Microsoft Corporation.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.

View File

@@ -1,164 +0,0 @@
# tslib
This is a runtime library for [TypeScript](https://www.typescriptlang.org/) that contains all of the TypeScript helper functions.
This library is primarily used by the `--importHelpers` flag in TypeScript.
When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign`, which would otherwise be emitted inline as in the following file:
```ts
var __assign = (this && this.__assign) || Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
exports.x = {};
exports.y = __assign({}, exports.x);
```
will instead be emitted as something like the following:
```ts
var tslib_1 = require("tslib");
exports.x = {};
exports.y = tslib_1.__assign({}, exports.x);
```
Because this avoids duplicate declarations of helpers like `__extends` and `__assign`, it delivers smaller files to users on average, as well as less runtime overhead.
For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`.
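To make the trigger concrete, here is a small illustrative module (not part of the original README; the file name and flags are placeholders, and the exact emit varies by TypeScript version and `target`) whose ES5 output needs both helpers shown above:
```ts
// example.ts -- compile with: tsc --target es5 --module commonjs example.ts
class Base {}
class Derived extends Base {}                          // ES5 emit needs the __extends helper

const defaults = { retries: 3 };
export const merged = { ...defaults, verbose: true };  // object spread needs __assign

// Without --importHelpers, tsc inlines __extends/__assign at the top of example.js.
// With --importHelpers (and tslib installed), example.js instead begins with
//   var tslib_1 = require("tslib");
// and calls tslib_1.__extends(...) / tslib_1.__assign(...) instead of inlining them.
```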
# Installing
For the latest stable version, run:
## npm
```sh
# TypeScript 3.9.2 or later
npm install tslib
# TypeScript 3.8.4 or earlier
npm install tslib@^1
# TypeScript 2.3.2 or earlier
npm install tslib@1.6.1
```
## yarn
```sh
# TypeScript 3.9.2 or later
yarn add tslib
# TypeScript 3.8.4 or earlier
yarn add tslib@^1
# TypeScript 2.3.2 or earlier
yarn add tslib@1.6.1
```
## bower
```sh
# TypeScript 3.9.2 or later
bower install tslib
# TypeScript 3.8.4 or earlier
bower install tslib@^1
# TypeScript 2.3.2 or earlier
bower install tslib@1.6.1
```
## JSPM
```sh
# TypeScript 3.9.2 or later
jspm install tslib
# TypeScript 3.8.4 or earlier
jspm install tslib@^1
# TypeScript 2.3.2 or earlier
jspm install tslib@1.6.1
```
# Usage
Set the `importHelpers` compiler option on the command line:
```
tsc --importHelpers file.ts
```
or in your tsconfig.json:
```json
{
"compilerOptions": {
"importHelpers": true
}
}
```
#### For bower and JSPM users
You will need to add a `paths` mapping for `tslib`. For example, for Bower users:
```json
{
"compilerOptions": {
"module": "amd",
"importHelpers": true,
"baseUrl": "./",
"paths": {
"tslib" : ["bower_components/tslib/tslib.d.ts"]
}
}
}
```
For JSPM users:
```json
{
"compilerOptions": {
"module": "system",
"importHelpers": true,
"baseUrl": "./",
"paths": {
"tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"]
}
}
}
```
## Deployment
- Choose your new version number
- Set it in `package.json` and `bower.json`
- Create a tag: `git tag [version]`
- Push the tag: `git push --tags`
- Create a [release in GitHub](https://github.com/microsoft/tslib/releases)
- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow
Done.
# Contribute
There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript.
* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in.
* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls).
* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript).
* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter.
* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md).
# Documentation
* [Quick tutorial](http://www.typescriptlang.org/Tutorial)
* [Programming handbook](http://www.typescriptlang.org/Handbook)
* [Homepage](http://www.typescriptlang.org/)

View File

@@ -1,41 +0,0 @@
<!-- BEGIN MICROSOFT SECURITY.MD V0.0.7 BLOCK -->
## Security
Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below.
## Reporting Security Issues
**Please do not report security vulnerabilities through public GitHub issues.**
Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report).
If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/opensource/security/pgpkey).
You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://aka.ms/opensource/security/msrc).
Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
* Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
* Full paths of source file(s) related to the manifestation of the issue
* The location of the affected source code (tag/branch/commit or direct URL)
* Any special configuration required to reproduce the issue
* Step-by-step instructions to reproduce the issue
* Proof-of-concept or exploit code (if possible)
* Impact of the issue, including how an attacker might exploit the issue
This information will help us triage your report more quickly.
If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/opensource/security/bounty) page for more details about our active programs.
## Preferred Languages
We prefer all communications to be in English.
## Policy
Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/opensource/security/cvd).
<!-- END MICROSOFT SECURITY.MD BLOCK -->

View File

@@ -1,37 +0,0 @@
// Note: named reexports are used instead of `export *` because
// TypeScript itself doesn't resolve the `export *` when checking
// if a particular helper exists.
export {
__extends,
__assign,
__rest,
__decorate,
__param,
__esDecorate,
__runInitializers,
__propKey,
__setFunctionName,
__metadata,
__awaiter,
__generator,
__exportStar,
__values,
__read,
__spread,
__spreadArrays,
__spreadArray,
__await,
__asyncGenerator,
__asyncDelegator,
__asyncValues,
__makeTemplateObject,
__importStar,
__importDefault,
__classPrivateFieldGet,
__classPrivateFieldSet,
__classPrivateFieldIn,
__createBinding,
__addDisposableResource,
__disposeResources,
} from '../tslib.js';
export * as default from '../tslib.js';

View File

@@ -1,68 +0,0 @@
import tslib from '../tslib.js';
const {
__extends,
__assign,
__rest,
__decorate,
__param,
__esDecorate,
__runInitializers,
__propKey,
__setFunctionName,
__metadata,
__awaiter,
__generator,
__exportStar,
__createBinding,
__values,
__read,
__spread,
__spreadArrays,
__spreadArray,
__await,
__asyncGenerator,
__asyncDelegator,
__asyncValues,
__makeTemplateObject,
__importStar,
__importDefault,
__classPrivateFieldGet,
__classPrivateFieldSet,
__classPrivateFieldIn,
__addDisposableResource,
__disposeResources,
} = tslib;
export {
__extends,
__assign,
__rest,
__decorate,
__param,
__esDecorate,
__runInitializers,
__propKey,
__setFunctionName,
__metadata,
__awaiter,
__generator,
__exportStar,
__createBinding,
__values,
__read,
__spread,
__spreadArrays,
__spreadArray,
__await,
__asyncGenerator,
__asyncDelegator,
__asyncValues,
__makeTemplateObject,
__importStar,
__importDefault,
__classPrivateFieldGet,
__classPrivateFieldSet,
__classPrivateFieldIn,
__addDisposableResource,
__disposeResources,
};
export default tslib;

View File

@@ -1,47 +0,0 @@
{
"name": "tslib",
"author": "Microsoft Corp.",
"homepage": "https://www.typescriptlang.org/",
"version": "2.6.0",
"license": "0BSD",
"description": "Runtime library for TypeScript helper functions",
"keywords": [
"TypeScript",
"Microsoft",
"compiler",
"language",
"javascript",
"tslib",
"runtime"
],
"bugs": {
"url": "https://github.com/Microsoft/TypeScript/issues"
},
"repository": {
"type": "git",
"url": "https://github.com/Microsoft/tslib.git"
},
"main": "tslib.js",
"module": "tslib.es6.js",
"jsnext:main": "tslib.es6.js",
"typings": "tslib.d.ts",
"sideEffects": false,
"exports": {
".": {
"module": {
"types": "./tslib/modules/index.d.ts",
"default": "./tslib.es6.mjs"
},
"import": {
"node": "./modules/index.js",
"default": {
"types": "./modules/index.d.ts",
"default": "./tslib.es6.mjs"
}
},
"default": "./tslib.js"
},
"./*": "./*",
"./": "./"
}
}

View File

@@ -1,453 +0,0 @@
/******************************************************************************
Copyright (c) Microsoft Corporation.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */
/**
* Used to shim class extends.
*
* @param d The derived class.
* @param b The base class.
*/
export declare function __extends(d: Function, b: Function): void;
/**
* Copy the values of all of the enumerable own properties from one or more source objects to a
* target object. Returns the target object.
*
* @param t The target object to copy to.
* @param sources One or more source objects from which to copy properties
*/
export declare function __assign(t: any, ...sources: any[]): any;
/**
* Performs a rest spread on an object.
*
* @param t The source value.
* @param propertyNames The property names excluded from the rest spread.
*/
export declare function __rest(t: any, propertyNames: (string | symbol)[]): any;
/**
* Applies decorators to a target object
*
* @param decorators The set of decorators to apply.
* @param target The target object.
* @param key If specified, the own property to apply the decorators to.
* @param desc The property descriptor, defaults to fetching the descriptor from the target object.
* @experimental
*/
export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any;
/**
* Creates an observing function decorator from a parameter decorator.
*
* @param paramIndex The parameter index to apply the decorator to.
* @param decorator The parameter decorator to apply. Note that the return value is ignored.
* @experimental
*/
export declare function __param(paramIndex: number, decorator: Function): Function;
/**
* Applies decorators to a class or class member, following the native ECMAScript decorator specification.
* @param ctor For non-field class members, the class constructor. Otherwise, `null`.
* @param descriptorIn The `PropertyDescriptor` to use when unable to look up the property from `ctor`.
* @param decorators The decorators to apply
* @param contextIn The `DecoratorContext` to clone for each decorator application.
* @param initializers An array of field initializer mutation functions into which new initializers are written.
* @param extraInitializers An array of extra initializer functions into which new initializers are written.
*/
export declare function __esDecorate(ctor: Function | null, descriptorIn: object | null, decorators: Function[], contextIn: object, initializers: Function[] | null, extraInitializers: Function[]): void;
/**
* Runs field initializers or extra initializers generated by `__esDecorate`.
* @param thisArg The `this` argument to use.
* @param initializers The array of initializers to evaluate.
* @param value The initial value to pass to the initializers.
*/
export declare function __runInitializers(thisArg: unknown, initializers: Function[], value?: any): any;
/**
* Converts a computed property name into a `string` or `symbol` value.
*/
export declare function __propKey(x: any): string | symbol;
/**
* Assigns the name of a function derived from the left-hand side of an assignment.
* @param f The function to rename.
* @param name The new name for the function.
* @param prefix A prefix (such as `"get"` or `"set"`) to insert before the name.
*/
export declare function __setFunctionName(f: Function, name: string | symbol, prefix?: string): Function;
/**
* Creates a decorator that sets metadata.
*
* @param metadataKey The metadata key
* @param metadataValue The metadata value
* @experimental
*/
export declare function __metadata(metadataKey: any, metadataValue: any): Function;
/**
* Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`.
*
* @param thisArg The reference to use as the `this` value in the generator function
* @param _arguments The optional arguments array
* @param P The optional promise constructor argument, defaults to the `Promise` property of the global object.
* @param generator The generator function
*/
export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any;
/**
* Creates an Iterator object using the body as the implementation.
*
* @param thisArg The reference to use as the `this` value in the function
* @param body The generator state-machine based implementation.
*
* @see [./docs/generator.md]
*/
export declare function __generator(thisArg: any, body: Function): any;
/**
* Creates bindings for all enumerable properties of `m` on `exports`
*
* @param m The source object.
* @param o The `exports` object.
*/
export declare function __exportStar(m: any, o: any): void;
/**
* Creates a value iterator from an `Iterable` or `ArrayLike` object.
*
* @param o The object.
* @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`.
*/
export declare function __values(o: any): any;
/**
* Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array.
*
* @param o The object to read from.
* @param n The maximum number of arguments to read, defaults to `Infinity`.
*/
export declare function __read(o: any, n?: number): any[];
/**
* Creates an array from iterable spread.
*
* @param args The Iterable objects to spread.
* @deprecated since TypeScript 4.2 - Use `__spreadArray`
*/
export declare function __spread(...args: any[][]): any[];
/**
* Creates an array from array spread.
*
* @param args The ArrayLikes to spread into the resulting array.
* @deprecated since TypeScript 4.2 - Use `__spreadArray`
*/
export declare function __spreadArrays(...args: any[][]): any[];
/**
* Spreads the `from` array into the `to` array.
*
* @param pack Replace empty elements with `undefined`.
*/
export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[];
/**
* Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded,
* and instead should be awaited and the resulting value passed back to the generator.
*
* @param v The value to await.
*/
export declare function __await(v: any): any;
/**
* Converts a generator function into an async generator function, by using `yield __await`
* in place of normal `await`.
*
* @param thisArg The reference to use as the `this` value in the generator function
* @param _arguments The optional arguments array
* @param generator The generator function
*/
export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any;
/**
* Used to wrap a potentially async iterator in such a way so that it wraps the result
* of calling iterator methods of `o` in `__await` instances, and then yields the awaited values.
*
* @param o The potentially async iterator.
* @returns A synchronous iterator yielding `__await` instances on every odd invocation
* and returning the awaited `IteratorResult` passed to `next` every even invocation.
*/
export declare function __asyncDelegator(o: any): any;
/**
* Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object.
*
* @param o The object.
* @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`.
*/
export declare function __asyncValues(o: any): any;
/**
* Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays.
*
* @param cooked The cooked possibly-sparse array.
* @param raw The raw string content.
*/
export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray;
/**
* Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS.
*
* ```js
* import Default, { Named, Other } from "mod";
* // or
* import { default as Default, Named, Other } from "mod";
* ```
*
* @param mod The CommonJS module exports object.
*/
export declare function __importStar<T>(mod: T): T;
/**
* Used to shim default imports in ECMAScript Modules transpiled to CommonJS.
*
* ```js
* import Default from "mod";
* ```
*
* @param mod The CommonJS module exports object.
*/
export declare function __importDefault<T>(mod: T): T | { default: T };
/**
* Emulates reading a private instance field.
*
* @param receiver The instance from which to read the private field.
* @param state A WeakMap containing the private field value for an instance.
* @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
*
* @throws {TypeError} If `state` doesn't have an entry for `receiver`.
*/
export declare function __classPrivateFieldGet<T extends object, V>(
receiver: T,
state: { has(o: T): boolean, get(o: T): V | undefined },
kind?: "f"
): V;
/**
* Emulates reading a private static field.
*
* @param receiver The object from which to read the private static field.
* @param state The class constructor containing the definition of the static field.
* @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
* @param f The descriptor that holds the static field value.
*
* @throws {TypeError} If `receiver` is not `state`.
*/
export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>(
receiver: T,
state: T,
kind: "f",
f: { value: V }
): V;
/**
* Emulates evaluating a private instance "get" accessor.
*
* @param receiver The instance on which to evaluate the private "get" accessor.
* @param state A WeakSet used to verify an instance supports the private "get" accessor.
* @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
* @param f The "get" accessor function to evaluate.
*
* @throws {TypeError} If `state` doesn't have an entry for `receiver`.
*/
export declare function __classPrivateFieldGet<T extends object, V>(
receiver: T,
state: { has(o: T): boolean },
kind: "a",
f: () => V
): V;
/**
* Emulates evaluating a private static "get" accessor.
*
* @param receiver The object on which to evaluate the private static "get" accessor.
* @param state The class constructor containing the definition of the static "get" accessor.
* @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
* @param f The "get" accessor function to evaluate.
*
* @throws {TypeError} If `receiver` is not `state`.
*/
export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>(
receiver: T,
state: T,
kind: "a",
f: () => V
): V;
/**
* Emulates reading a private instance method.
*
* @param receiver The instance from which to read a private method.
* @param state A WeakSet used to verify an instance supports the private method.
* @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
* @param f The function to return as the private instance method.
*
* @throws {TypeError} If `state` doesn't have an entry for `receiver`.
*/
export declare function __classPrivateFieldGet<T extends object, V extends (...args: any[]) => unknown>(
receiver: T,
state: { has(o: T): boolean },
kind: "m",
f: V
): V;
/**
* Emulates reading a private static method.
*
* @param receiver The object from which to read the private static method.
* @param state The class constructor containing the definition of the static method.
* @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
* @param f The function to return as the private static method.
*
* @throws {TypeError} If `receiver` is not `state`.
*/
export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V extends (...args: any[]) => unknown>(
receiver: T,
state: T,
kind: "m",
f: V
): V;
/**
* Emulates writing to a private instance field.
*
* @param receiver The instance on which to set a private field value.
* @param state A WeakMap used to store the private field value for an instance.
* @param value The value to store in the private field.
* @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
*
* @throws {TypeError} If `state` doesn't have an entry for `receiver`.
*/
export declare function __classPrivateFieldSet<T extends object, V>(
receiver: T,
state: { has(o: T): boolean, set(o: T, value: V): unknown },
value: V,
kind?: "f"
): V;
/**
* Emulates writing to a private static field.
*
* @param receiver The object on which to set the private static field.
* @param state The class constructor containing the definition of the private static field.
* @param value The value to store in the private field.
* @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
* @param f The descriptor that holds the static field value.
*
* @throws {TypeError} If `receiver` is not `state`.
*/
export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>(
receiver: T,
state: T,
value: V,
kind: "f",
f: { value: V }
): V;
/**
* Emulates writing to a private instance "set" accessor.
*
* @param receiver The instance on which to evaluate the private instance "set" accessor.
* @param state A WeakSet used to verify an instance supports the private "set" accessor.
* @param value The value to store in the private accessor.
* @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
* @param f The "set" accessor function to evaluate.
*
* @throws {TypeError} If `state` doesn't have an entry for `receiver`.
*/
export declare function __classPrivateFieldSet<T extends object, V>(
receiver: T,
state: { has(o: T): boolean },
value: V,
kind: "a",
f: (v: V) => void
): V;
/**
* Emulates writing to a private static "set" accessor.
*
* @param receiver The object on which to evaluate the private static "set" accessor.
* @param state The class constructor containing the definition of the static "set" accessor.
* @param value The value to store in the private field.
* @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
* @param f The "set" accessor function to evaluate.
*
* @throws {TypeError} If `receiver` is not `state`.
*/
export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>(
receiver: T,
state: T,
value: V,
kind: "a",
f: (v: V) => void
): V;
/**
* Checks for the existence of a private field/method/accessor.
*
* @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member.
* @param receiver The object for which to test the presence of the private member.
*/
export declare function __classPrivateFieldIn(
state: (new (...args: any[]) => unknown) | { has(o: any): boolean },
receiver: unknown,
): boolean;
/**
* Creates a re-export binding on `object` with key `objectKey` that references `target[key]`.
*
* @param object The local `exports` object.
* @param target The object to re-export from.
* @param key The property key of `target` to re-export.
* @param objectKey The property key to re-export as. Defaults to `key`.
*/
export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void;
/**
* Adds a disposable resource to a resource-tracking environment object.
* @param env A resource-tracking environment object.
* @param value Either a Disposable or AsyncDisposable object, `null`, or `undefined`.
* @param async When `true`, `AsyncDisposable` resources can be added. When `false`, `AsyncDisposable` resources cannot be added.
* @returns The {@link value} argument.
*
* @throws {TypeError} If {@link value} is not an object, or if either `Symbol.dispose` or `Symbol.asyncDispose` are not
* defined, or if {@link value} does not have an appropriate `Symbol.dispose` or `Symbol.asyncDispose` method.
*/
export declare function __addDisposableResource<T>(env: { stack: { value?: unknown, dispose?: Function, async: boolean }[]; error: unknown; hasError: boolean; }, value: T, async: boolean): T;
/**
* Disposes all resources in a resource-tracking environment object.
* @param env A resource-tracking environment object.
* @returns A {@link Promise} if any resources in the environment were marked as `async` when added; otherwise, `void`.
*
* @throws {SuppressedError} if an error thrown during disposal would have suppressed a prior error from disposal or the
* error recorded in the resource-tracking environment object.
* @seealso {@link __addDisposableResource}
*/
export declare function __disposeResources(env: { stack: { value?: unknown, dispose?: Function, async: boolean }[]; error: unknown; hasError: boolean; }): any;
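As an orientation sketch at the end of these declarations (illustrative only, not part of tslib itself; the file name and flags are placeholders, and the exact emit varies by TypeScript version): this is roughly how down-leveled application code ends up calling helpers such as `__awaiter` and `__generator`.
```ts
// source.ts -- compile with: tsc --target es5 --lib es2015 --module commonjs --importHelpers source.ts
export async function fetchTwice(load: () => Promise<number>): Promise<number> {
  const a = await load();
  const b = await load();
  return a + b;
}

// Emitted source.js (shape only): the async body becomes a state machine and the
// Promise plumbing is delegated to the helpers declared above.
//   var tslib_1 = require("tslib");
//   function fetchTwice(load) {
//     return tslib_1.__awaiter(this, void 0, void 0, function () {
//       return tslib_1.__generator(this, function (_a) { /* switch over _a.label */ });
//     });
//   }
```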

View File

@@ -1 +0,0 @@
<script src="tslib.es6.js"></script>

View File

@@ -1,370 +0,0 @@
/******************************************************************************
Copyright (c) Microsoft Corporation.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */
/* global Reflect, Promise, SuppressedError, Symbol */
var extendStatics = function(d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
export function __extends(d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
}
export var __assign = function() {
__assign = Object.assign || function __assign(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
}
return t;
}
return __assign.apply(this, arguments);
}
export function __rest(s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
}
export function __decorate(decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
}
export function __param(paramIndex, decorator) {
return function (target, key) { decorator(target, key, paramIndex); }
}
export function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) {
function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; }
var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value";
var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null;
var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {});
var _, done = false;
for (var i = decorators.length - 1; i >= 0; i--) {
var context = {};
for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p];
for (var p in contextIn.access) context.access[p] = contextIn.access[p];
context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); };
var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context);
if (kind === "accessor") {
if (result === void 0) continue;
if (result === null || typeof result !== "object") throw new TypeError("Object expected");
if (_ = accept(result.get)) descriptor.get = _;
if (_ = accept(result.set)) descriptor.set = _;
if (_ = accept(result.init)) initializers.unshift(_);
}
else if (_ = accept(result)) {
if (kind === "field") initializers.unshift(_);
else descriptor[key] = _;
}
}
if (target) Object.defineProperty(target, contextIn.name, descriptor);
done = true;
};
export function __runInitializers(thisArg, initializers, value) {
var useValue = arguments.length > 2;
for (var i = 0; i < initializers.length; i++) {
value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg);
}
return useValue ? value : void 0;
};
export function __propKey(x) {
return typeof x === "symbol" ? x : "".concat(x);
};
export function __setFunctionName(f, name, prefix) {
if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : "";
return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name });
};
export function __metadata(metadataKey, metadataValue) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue);
}
export function __awaiter(thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
}
export function __generator(thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (g && (g = 0, op[0] && (_ = 0)), _) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
}
export var __createBinding = Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
});
export function __exportStar(m, o) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);
}
export function __values(o) {
var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0;
if (m) return m.call(o);
if (o && typeof o.length === "number") return {
next: function () {
if (o && i >= o.length) o = void 0;
return { value: o && o[i++], done: !o };
}
};
throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
}
export function __read(o, n) {
var m = typeof Symbol === "function" && o[Symbol.iterator];
if (!m) return o;
var i = m.call(o), r, ar = [], e;
try {
while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
}
catch (error) { e = { error: error }; }
finally {
try {
if (r && !r.done && (m = i["return"])) m.call(i);
}
finally { if (e) throw e.error; }
}
return ar;
}
/** @deprecated */
export function __spread() {
for (var ar = [], i = 0; i < arguments.length; i++)
ar = ar.concat(__read(arguments[i]));
return ar;
}
/** @deprecated */
export function __spreadArrays() {
for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;
for (var r = Array(s), k = 0, i = 0; i < il; i++)
for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)
r[k] = a[j];
return r;
}
export function __spreadArray(to, from, pack) {
if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
if (ar || !(i in from)) {
if (!ar) ar = Array.prototype.slice.call(from, 0, i);
ar[i] = from[i];
}
}
return to.concat(ar || Array.prototype.slice.call(from));
}
export function __await(v) {
return this instanceof __await ? (this.v = v, this) : new __await(v);
}
export function __asyncGenerator(thisArg, _arguments, generator) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var g = generator.apply(thisArg, _arguments || []), i, q = [];
return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
function fulfill(value) { resume("next", value); }
function reject(value) { resume("throw", value); }
function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
}
export function __asyncDelegator(o) {
var i, p;
return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i;
function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; }
}
export function __asyncValues(o) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var m = o[Symbol.asyncIterator], i;
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
}
export function __makeTemplateObject(cooked, raw) {
if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; }
return cooked;
};
var __setModuleDefault = Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
};
export function __importStar(mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
}
export function __importDefault(mod) {
return (mod && mod.__esModule) ? mod : { default: mod };
}
export function __classPrivateFieldGet(receiver, state, kind, f) {
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
}
export function __classPrivateFieldSet(receiver, state, value, kind, f) {
if (kind === "m") throw new TypeError("Private method is not writable");
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
}
export function __classPrivateFieldIn(state, receiver) {
if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object");
return typeof state === "function" ? receiver === state : state.has(receiver);
}
export function __addDisposableResource(env, value, async) {
if (value !== null && value !== void 0) {
if (typeof value !== "object") throw new TypeError("Object expected.");
var dispose;
if (async) {
if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
dispose = value[Symbol.asyncDispose];
}
if (dispose === void 0) {
if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
dispose = value[Symbol.dispose];
}
if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
env.stack.push({ value: value, dispose: dispose, async: async });
}
else if (async) {
env.stack.push({ async: true });
}
return value;
}
var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
var e = new Error(message);
return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
};
export function __disposeResources(env) {
function fail(e) {
env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e;
env.hasError = true;
}
function next() {
while (env.stack.length) {
var rec = env.stack.pop();
try {
var result = rec.dispose && rec.dispose.call(rec.value);
if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); });
}
catch (e) {
fail(e);
}
}
if (env.hasError) throw env.error;
}
return next();
}
export default {
__extends,
__assign,
__rest,
__decorate,
__param,
__metadata,
__awaiter,
__generator,
__createBinding,
__exportStar,
__values,
__read,
__spread,
__spreadArrays,
__spreadArray,
__await,
__asyncGenerator,
__asyncDelegator,
__asyncValues,
__makeTemplateObject,
__importStar,
__importDefault,
__classPrivateFieldGet,
__classPrivateFieldSet,
__classPrivateFieldIn,
__addDisposableResource,
__disposeResources,
};

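For context only (not part of this diff): a minimal, hand-written sketch of how compiled TypeScript typically ends up calling the `__awaiter` helper defined in the module above when `--importHelpers` is enabled. The exact emit shape varies by compiler version and target, so the function name `getJson` and the flag combination are assumptions, not a claim about this repository's build.

```js
// Approximate emit (assumption) for:
//   async function getJson(url) { const res = await fetch(url); return res.json(); }
// when compiled with --target es2015 --importHelpers.
import { __awaiter } from "tslib";

export function getJson(url) {
    // __awaiter drives the generator: each yielded value is adopted into a Promise,
    // and the generator is resumed when that Promise settles.
    return __awaiter(this, void 0, void 0, function* () {
        const res = yield fetch(url);
        return res.json();
    });
}

// Usage: getJson("https://example.com/data.json").then(console.log);
```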
View File

@@ -1,370 +0,0 @@
/******************************************************************************
Copyright (c) Microsoft Corporation.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */
/* global Reflect, Promise, SuppressedError, Symbol */
var extendStatics = function(d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
export function __extends(d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
}
export var __assign = function() {
__assign = Object.assign || function __assign(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
}
return t;
}
return __assign.apply(this, arguments);
}
export function __rest(s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
}
export function __decorate(decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
}
export function __param(paramIndex, decorator) {
return function (target, key) { decorator(target, key, paramIndex); }
}
export function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) {
function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; }
var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value";
var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null;
var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {});
var _, done = false;
for (var i = decorators.length - 1; i >= 0; i--) {
var context = {};
for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p];
for (var p in contextIn.access) context.access[p] = contextIn.access[p];
context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); };
var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context);
if (kind === "accessor") {
if (result === void 0) continue;
if (result === null || typeof result !== "object") throw new TypeError("Object expected");
if (_ = accept(result.get)) descriptor.get = _;
if (_ = accept(result.set)) descriptor.set = _;
if (_ = accept(result.init)) initializers.unshift(_);
}
else if (_ = accept(result)) {
if (kind === "field") initializers.unshift(_);
else descriptor[key] = _;
}
}
if (target) Object.defineProperty(target, contextIn.name, descriptor);
done = true;
};
export function __runInitializers(thisArg, initializers, value) {
var useValue = arguments.length > 2;
for (var i = 0; i < initializers.length; i++) {
value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg);
}
return useValue ? value : void 0;
};
export function __propKey(x) {
return typeof x === "symbol" ? x : "".concat(x);
};
export function __setFunctionName(f, name, prefix) {
if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : "";
return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name });
};
export function __metadata(metadataKey, metadataValue) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue);
}
export function __awaiter(thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
}
export function __generator(thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (g && (g = 0, op[0] && (_ = 0)), _) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
}
export var __createBinding = Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
});
export function __exportStar(m, o) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);
}
export function __values(o) {
var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0;
if (m) return m.call(o);
if (o && typeof o.length === "number") return {
next: function () {
if (o && i >= o.length) o = void 0;
return { value: o && o[i++], done: !o };
}
};
throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
}
export function __read(o, n) {
var m = typeof Symbol === "function" && o[Symbol.iterator];
if (!m) return o;
var i = m.call(o), r, ar = [], e;
try {
while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
}
catch (error) { e = { error: error }; }
finally {
try {
if (r && !r.done && (m = i["return"])) m.call(i);
}
finally { if (e) throw e.error; }
}
return ar;
}
/** @deprecated */
export function __spread() {
for (var ar = [], i = 0; i < arguments.length; i++)
ar = ar.concat(__read(arguments[i]));
return ar;
}
/** @deprecated */
export function __spreadArrays() {
for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;
for (var r = Array(s), k = 0, i = 0; i < il; i++)
for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)
r[k] = a[j];
return r;
}
export function __spreadArray(to, from, pack) {
if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
if (ar || !(i in from)) {
if (!ar) ar = Array.prototype.slice.call(from, 0, i);
ar[i] = from[i];
}
}
return to.concat(ar || Array.prototype.slice.call(from));
}
export function __await(v) {
return this instanceof __await ? (this.v = v, this) : new __await(v);
}
export function __asyncGenerator(thisArg, _arguments, generator) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var g = generator.apply(thisArg, _arguments || []), i, q = [];
return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
function fulfill(value) { resume("next", value); }
function reject(value) { resume("throw", value); }
function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
}
export function __asyncDelegator(o) {
var i, p;
return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i;
function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; }
}
export function __asyncValues(o) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var m = o[Symbol.asyncIterator], i;
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
}
export function __makeTemplateObject(cooked, raw) {
if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; }
return cooked;
};
var __setModuleDefault = Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
};
export function __importStar(mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
}
export function __importDefault(mod) {
return (mod && mod.__esModule) ? mod : { default: mod };
}
export function __classPrivateFieldGet(receiver, state, kind, f) {
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
}
export function __classPrivateFieldSet(receiver, state, value, kind, f) {
if (kind === "m") throw new TypeError("Private method is not writable");
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
}
export function __classPrivateFieldIn(state, receiver) {
if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object");
return typeof state === "function" ? receiver === state : state.has(receiver);
}
export function __addDisposableResource(env, value, async) {
if (value !== null && value !== void 0) {
if (typeof value !== "object") throw new TypeError("Object expected.");
var dispose;
if (async) {
if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
dispose = value[Symbol.asyncDispose];
}
if (dispose === void 0) {
if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
dispose = value[Symbol.dispose];
}
if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
env.stack.push({ value: value, dispose: dispose, async: async });
}
else if (async) {
env.stack.push({ async: true });
}
return value;
}
var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
var e = new Error(message);
return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
};
export function __disposeResources(env) {
function fail(e) {
env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e;
env.hasError = true;
}
function next() {
while (env.stack.length) {
var rec = env.stack.pop();
try {
var result = rec.dispose && rec.dispose.call(rec.value);
if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); });
}
catch (e) {
fail(e);
}
}
if (env.hasError) throw env.error;
}
return next();
}
export default {
__extends,
__assign,
__rest,
__decorate,
__param,
__metadata,
__awaiter,
__generator,
__createBinding,
__exportStar,
__values,
__read,
__spread,
__spreadArrays,
__spreadArray,
__await,
__asyncGenerator,
__asyncDelegator,
__asyncValues,
__makeTemplateObject,
__importStar,
__importDefault,
__classPrivateFieldGet,
__classPrivateFieldSet,
__classPrivateFieldIn,
__addDisposableResource,
__disposeResources,
};

View File

@@ -1 +0,0 @@
<script src="tslib.js"></script>

View File

@@ -1,421 +0,0 @@
/******************************************************************************
Copyright (c) Microsoft Corporation.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */
/* global global, define, Symbol, Reflect, Promise, SuppressedError */
var __extends;
var __assign;
var __rest;
var __decorate;
var __param;
var __esDecorate;
var __runInitializers;
var __propKey;
var __setFunctionName;
var __metadata;
var __awaiter;
var __generator;
var __exportStar;
var __values;
var __read;
var __spread;
var __spreadArrays;
var __spreadArray;
var __await;
var __asyncGenerator;
var __asyncDelegator;
var __asyncValues;
var __makeTemplateObject;
var __importStar;
var __importDefault;
var __classPrivateFieldGet;
var __classPrivateFieldSet;
var __classPrivateFieldIn;
var __createBinding;
var __addDisposableResource;
var __disposeResources;
(function (factory) {
var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {};
if (typeof define === "function" && define.amd) {
define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); });
}
else if (typeof module === "object" && typeof module.exports === "object") {
factory(createExporter(root, createExporter(module.exports)));
}
else {
factory(createExporter(root));
}
function createExporter(exports, previous) {
if (exports !== root) {
if (typeof Object.create === "function") {
Object.defineProperty(exports, "__esModule", { value: true });
}
else {
exports.__esModule = true;
}
}
return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };
}
})
(function (exporter) {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
__extends = function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
__assign = Object.assign || function (t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
}
return t;
};
__rest = function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
__decorate = function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
__param = function (paramIndex, decorator) {
return function (target, key) { decorator(target, key, paramIndex); }
};
__esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) {
function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; }
var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value";
var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null;
var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {});
var _, done = false;
for (var i = decorators.length - 1; i >= 0; i--) {
var context = {};
for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p];
for (var p in contextIn.access) context.access[p] = contextIn.access[p];
context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); };
var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context);
if (kind === "accessor") {
if (result === void 0) continue;
if (result === null || typeof result !== "object") throw new TypeError("Object expected");
if (_ = accept(result.get)) descriptor.get = _;
if (_ = accept(result.set)) descriptor.set = _;
if (_ = accept(result.init)) initializers.unshift(_);
}
else if (_ = accept(result)) {
if (kind === "field") initializers.unshift(_);
else descriptor[key] = _;
}
}
if (target) Object.defineProperty(target, contextIn.name, descriptor);
done = true;
};
__runInitializers = function (thisArg, initializers, value) {
var useValue = arguments.length > 2;
for (var i = 0; i < initializers.length; i++) {
value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg);
}
return useValue ? value : void 0;
};
__propKey = function (x) {
return typeof x === "symbol" ? x : "".concat(x);
};
__setFunctionName = function (f, name, prefix) {
if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : "";
return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name });
};
__metadata = function (metadataKey, metadataValue) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue);
};
__awaiter = function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
__generator = function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (g && (g = 0, op[0] && (_ = 0)), _) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
__exportStar = function(m, o) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);
};
__createBinding = Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
});
__values = function (o) {
var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0;
if (m) return m.call(o);
if (o && typeof o.length === "number") return {
next: function () {
if (o && i >= o.length) o = void 0;
return { value: o && o[i++], done: !o };
}
};
throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
};
__read = function (o, n) {
var m = typeof Symbol === "function" && o[Symbol.iterator];
if (!m) return o;
var i = m.call(o), r, ar = [], e;
try {
while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
}
catch (error) { e = { error: error }; }
finally {
try {
if (r && !r.done && (m = i["return"])) m.call(i);
}
finally { if (e) throw e.error; }
}
return ar;
};
/** @deprecated */
__spread = function () {
for (var ar = [], i = 0; i < arguments.length; i++)
ar = ar.concat(__read(arguments[i]));
return ar;
};
/** @deprecated */
__spreadArrays = function () {
for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;
for (var r = Array(s), k = 0, i = 0; i < il; i++)
for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)
r[k] = a[j];
return r;
};
__spreadArray = function (to, from, pack) {
if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
if (ar || !(i in from)) {
if (!ar) ar = Array.prototype.slice.call(from, 0, i);
ar[i] = from[i];
}
}
return to.concat(ar || Array.prototype.slice.call(from));
};
__await = function (v) {
return this instanceof __await ? (this.v = v, this) : new __await(v);
};
__asyncGenerator = function (thisArg, _arguments, generator) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var g = generator.apply(thisArg, _arguments || []), i, q = [];
return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
function fulfill(value) { resume("next", value); }
function reject(value) { resume("throw", value); }
function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
};
__asyncDelegator = function (o) {
var i, p;
return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i;
function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; }
};
__asyncValues = function (o) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var m = o[Symbol.asyncIterator], i;
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
__makeTemplateObject = function (cooked, raw) {
if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; }
return cooked;
};
var __setModuleDefault = Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
};
__importStar = function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
__importDefault = function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
__classPrivateFieldGet = function (receiver, state, kind, f) {
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
};
__classPrivateFieldSet = function (receiver, state, value, kind, f) {
if (kind === "m") throw new TypeError("Private method is not writable");
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
};
__classPrivateFieldIn = function (state, receiver) {
if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object");
return typeof state === "function" ? receiver === state : state.has(receiver);
};
__addDisposableResource = function (env, value, async) {
if (value !== null && value !== void 0) {
if (typeof value !== "object") throw new TypeError("Object expected.");
var dispose;
if (async) {
if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
dispose = value[Symbol.asyncDispose];
}
if (dispose === void 0) {
if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
dispose = value[Symbol.dispose];
}
if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
env.stack.push({ value: value, dispose: dispose, async: async });
}
else if (async) {
env.stack.push({ async: true });
}
return value;
};
var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
var e = new Error(message);
return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
};
__disposeResources = function (env) {
function fail(e) {
env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e;
env.hasError = true;
}
function next() {
while (env.stack.length) {
var rec = env.stack.pop();
try {
var result = rec.dispose && rec.dispose.call(rec.value);
if (rec.async) return Promise.resolve(result).then(next, function(e) { fail(e); return next(); });
}
catch (e) {
fail(e);
}
}
if (env.hasError) throw env.error;
}
return next();
};
exporter("__extends", __extends);
exporter("__assign", __assign);
exporter("__rest", __rest);
exporter("__decorate", __decorate);
exporter("__param", __param);
exporter("__esDecorate", __esDecorate);
exporter("__runInitializers", __runInitializers);
exporter("__propKey", __propKey);
exporter("__setFunctionName", __setFunctionName);
exporter("__metadata", __metadata);
exporter("__awaiter", __awaiter);
exporter("__generator", __generator);
exporter("__exportStar", __exportStar);
exporter("__createBinding", __createBinding);
exporter("__values", __values);
exporter("__read", __read);
exporter("__spread", __spread);
exporter("__spreadArrays", __spreadArrays);
exporter("__spreadArray", __spreadArray);
exporter("__await", __await);
exporter("__asyncGenerator", __asyncGenerator);
exporter("__asyncDelegator", __asyncDelegator);
exporter("__asyncValues", __asyncValues);
exporter("__makeTemplateObject", __makeTemplateObject);
exporter("__importStar", __importStar);
exporter("__importDefault", __importDefault);
exporter("__classPrivateFieldGet", __classPrivateFieldGet);
exporter("__classPrivateFieldSet", __classPrivateFieldSet);
exporter("__classPrivateFieldIn", __classPrivateFieldIn);
exporter("__addDisposableResource", __addDisposableResource);
exporter("__disposeResources", __disposeResources);
});

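As a rough illustration (again, not taken from this diff), CommonJS output that calls the `__importStar` / `__importDefault` helpers exported by the UMD bundle above might look like the sketch below. This assumes the flags `--module commonjs --esModuleInterop --importHelpers`; the precise shape depends on the compiler version.

```js
"use strict";
// Approximate emit (assumption) for:
//   import * as path from "path";
//   import os from "os";
Object.defineProperty(exports, "__esModule", { value: true });
var tslib_1 = require("tslib");
// __importStar builds a namespace object, copying own properties and setting `default`.
var path = tslib_1.__importStar(require("path"));
// __importDefault wraps a CommonJS module as { default: module } when it lacks __esModule.
var os_1 = tslib_1.__importDefault(require("os"));

console.log(path.join(os_1.default.tmpdir(), "example.txt"));
```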
View File

@@ -1,87 +0,0 @@
property targetTab: null
property targetTabIndex: -1
property targetWindow: null
property theProgram: "Google Chrome"
on run argv
set theURL to item 1 of argv
-- The requested program is optional;
-- default to Google Chrome
if (count of argv) > 1 then
set theProgram to item 2 of argv
end if
using terms from application "Google Chrome"
tell application theProgram
if (count every window) = 0 then
make new window
end if
-- 1: Look for a tab that is already running the debugger.
-- If one is found, reload it, bring its window to the front, and return.
set found to my lookupTabWithUrl(theURL)
if found then
set targetWindow's active tab index to targetTabIndex
tell targetTab to reload
tell targetWindow to activate
set index of targetWindow to 1
return
end if
-- 2: Look for an empty tab.
-- If no debugging tab was found, reuse an empty new-tab page instead.
set found to my lookupTabWithUrl("chrome://newtab/")
if found then
set targetWindow's active tab index to targetTabIndex
set URL of targetTab to theURL
tell targetWindow to activate
return
end if
-- 3: Create a new tab.
-- Neither a debugging tab nor an empty tab was found,
-- so open a new tab with the URL in the front window.
tell window 1
activate
make new tab with properties {URL:theURL}
end tell
end tell
end using terms from
end run
-- Function: look up a tab with the given URL.
-- If found, store the tab, its index, and its window in the
-- properties declared at the top of the file.
on lookupTabWithUrl(lookupUrl)
using terms from application "Google Chrome"
tell application theProgram
-- Find a tab with the given url
set found to false
set theTabIndex to -1
repeat with theWindow in every window
set theTabIndex to 0
repeat with theTab in every tab of theWindow
set theTabIndex to theTabIndex + 1
if (theTab's URL as string) contains lookupUrl then
-- assign tab, tab index, and window to properties
set targetTab to theTab
set targetTabIndex to theTabIndex
set targetWindow to theWindow
set found to true
exit repeat
end if
end repeat
if found then
exit repeat
end if
end repeat
end tell
end using terms from
return found
end lookupTabWithUrl

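A hypothetical way to run a script like the one above from Node.js on macOS: the file name `openChrome.applescript` and the localhost URL are assumptions for illustration; the script itself only requires the target URL as its first argument and accepts an optional application name as the second.

```js
// macOS only: shells out to osascript to run the AppleScript shown above.
const { execFile } = require("child_process");

execFile(
  "osascript",
  ["openChrome.applescript", "http://localhost:3000", "Google Chrome"],
  (err, stdout, stderr) => {
    if (err) {
      console.error("Could not reuse or open a browser tab:", stderr || err.message);
      return;
    }
    console.log("Browser tab reloaded or opened.");
  }
);
```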
Some files were not shown because too many files have changed in this diff.