Mirror of https://github.com/graalvm/setup-graalvm.git
Synced 2025-12-06 07:48:06 +08:00

Compare commits: e140024fdc ... 2a24120090 (5 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 2a24120090 |  |
|  | 39a82c0b2c |  |
|  | 62b95e60c7 |  |
|  | a19d2ab4f7 |  |
|  | 319b5062ea |  |
dist/cleanup.js (52 changed lines; generated, vendored)

@@ -27288,7 +27288,7 @@ function requireCore () {
 var coreExports = requireCore();

-const ACTION_VERSION = '1.4.0';
+const ACTION_VERSION = '1.4.1';
 const INPUT_GITHUB_TOKEN = 'github-token';
 const INPUT_CACHE = 'cache';
 process.platform === 'linux';
@@ -69174,7 +69174,7 @@ function requireConfig () {
 var userAgent$1 = {};

-var version = "4.0.5";
+var version = "4.1.0";
 var require$$0$1 = {
     version: version};

@@ -74028,11 +74028,12 @@ function requireCache$1 () {
     constructor() {
         super("github.actions.results.api.v1.CreateCacheEntryResponse", [
             { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
-            { no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+            { no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "message", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
         ]);
     }
     create(value) {
-        const message = { ok: false, signedUploadUrl: "" };
+        const message = { ok: false, signedUploadUrl: "", message: "" };
         globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
         if (value !== undefined)
             (0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -74049,6 +74050,9 @@ function requireCache$1 () {
             case /* string signed_upload_url */ 2:
                 message.signedUploadUrl = reader.string();
                 break;
+            case /* string message */ 3:
+                message.message = reader.string();
+                break;
             default:
                 let u = options.readUnknownField;
                 if (u === "throw")
@@ -74067,6 +74071,9 @@ function requireCache$1 () {
         /* string signed_upload_url = 2; */
         if (message.signedUploadUrl !== "")
             writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);
+        /* string message = 3; */
+        if (message.message !== "")
+            writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message);
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -74150,11 +74157,12 @@ function requireCache$1 () {
     constructor() {
         super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [
             { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
-            { no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+            { no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
+            { no: 3, name: "message", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
         ]);
     }
     create(value) {
-        const message = { ok: false, entryId: "0" };
+        const message = { ok: false, entryId: "0", message: "" };
         globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
         if (value !== undefined)
             (0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -74171,6 +74179,9 @@ function requireCache$1 () {
             case /* int64 entry_id */ 2:
                 message.entryId = reader.int64().toString();
                 break;
+            case /* string message */ 3:
+                message.message = reader.string();
+                break;
             default:
                 let u = options.readUnknownField;
                 if (u === "throw")
@@ -74189,6 +74200,9 @@ function requireCache$1 () {
         /* int64 entry_id = 2; */
         if (message.entryId !== "0")
             writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId);
+        /* string message = 3; */
+        if (message.message !== "")
+            writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message);
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -74985,7 +74999,7 @@ function requireCache () {
     });
 };
 Object.defineProperty(cache$1, "__esModule", { value: true });
-cache$1.saveCache = cache$1.restoreCache = cache$1.isFeatureAvailable = cache$1.ReserveCacheError = cache$1.ValidationError = void 0;
+cache$1.saveCache = cache$1.restoreCache = cache$1.isFeatureAvailable = cache$1.FinalizeCacheError = cache$1.ReserveCacheError = cache$1.ValidationError = void 0;
 const core = __importStar(requireCore());
 const path = __importStar(require$$0$b);
 const utils = __importStar(requireCacheUtils());
@@ -74993,7 +75007,6 @@ function requireCache () {
 const cacheTwirpClient = __importStar(requireCacheTwirpClient());
 const config_1 = requireConfig();
 const tar_1 = requireTar();
 const constants_1 = requireConstants$1();
 const http_client_1 = requireLib();
 class ValidationError extends Error {
     constructor(message) {
@@ -75011,6 +75024,14 @@ function requireCache () {
     }
 }
 cache$1.ReserveCacheError = ReserveCacheError;
+class FinalizeCacheError extends Error {
+    constructor(message) {
+        super(message);
+        this.name = 'FinalizeCacheError';
+        Object.setPrototypeOf(this, FinalizeCacheError.prototype);
+    }
+}
+cache$1.FinalizeCacheError = FinalizeCacheError;
 function checkPaths(paths) {
     if (!paths || paths.length === 0) {
         throw new ValidationError(`Path Validation Error: At least one directory or file path is required`);
@@ -75387,10 +75408,6 @@ function requireCache () {
     }
     const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
     core.debug(`File Size: ${archiveFileSize}`);
-    // For GHES, this check will take place in ReserveCache API with enterprise file size limit
-    if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) {
-        throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
-    }
     // Set the archive size in the options, will be used to display the upload progress
     options.archiveSizeBytes = archiveFileSize;
     core.debug('Reserving Cache');
@@ -75403,7 +75420,10 @@ function requireCache () {
     try {
         const response = yield twirpClient.CreateCacheEntry(request);
         if (!response.ok) {
-            throw new Error('Response was not ok');
+            if (response.message) {
+                core.warning(`Cache reservation failed: ${response.message}`);
+            }
+            throw new Error(response.message || 'Response was not ok');
         }
         signedUploadUrl = response.signedUploadUrl;
     }
@@ -75421,6 +75441,9 @@ function requireCache () {
     const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest);
     core.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`);
     if (!finalizeResponse.ok) {
+        if (finalizeResponse.message) {
+            throw new FinalizeCacheError(finalizeResponse.message);
+        }
         throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`);
     }
     cacheId = parseInt(finalizeResponse.entryId);
@@ -75433,6 +75456,9 @@ function requireCache () {
     else if (typedError.name === ReserveCacheError.name) {
         core.info(`Failed to save: ${typedError.message}`);
     }
+    else if (typedError.name === FinalizeCacheError.name) {
+        core.warning(typedError.message);
+    }
     else {
         // Log server errors (5xx) as errors, all other errors as warnings
         if (typedError instanceof http_client_1.HttpClientError &&
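The vendored @actions/cache update above introduces a FinalizeCacheError so that a failed FinalizeCacheEntryUpload can be downgraded to a warning instead of failing the whole save. A minimal TypeScript sketch of the same pattern follows; the class body mirrors the diff, while the helper function and its name are purely illustrative and not part of the bundle.

import * as core from '@actions/core'

// Error subclass pattern used by the vendored code: setPrototypeOf keeps
// `instanceof` and name-based checks working in down-leveled bundles.
class FinalizeCacheError extends Error {
  constructor(message: string) {
    super(message)
    this.name = 'FinalizeCacheError'
    Object.setPrototypeOf(this, FinalizeCacheError.prototype)
  }
}

// Hypothetical helper (not part of the bundle): report a finalize failure as a
// workflow warning, rethrow anything else.
function reportSaveError(error: unknown): void {
  if (error instanceof Error && error.name === FinalizeCacheError.name) {
    core.warning(error.message)
  } else {
    throw error
  }
}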
dist/main.js (56 changed lines; generated, vendored)

@@ -39,7 +39,7 @@ import * as https from 'node:https';
 import * as zlib from 'node:zlib';
 import require$$1$5 from 'tty';

-const ACTION_VERSION = '1.4.0';
+const ACTION_VERSION = '1.4.1';
 const INPUT_VERSION = 'version';
 const INPUT_GDS_TOKEN = 'gds-token';
 const INPUT_JAVA_VERSION = 'java-version';
@@ -36612,7 +36612,7 @@ async function getLatestRelease(repo) {
     return (await octokit.request('GET /repos/{owner}/{repo}/releases/latest', {
         owner: GRAALVM_GH_USER,
         repo
-    })).data;
+    })).data; /** missing digest property */
 }
 async function getContents(repo, path) {
     const octokit = getOctokit();
@@ -36628,7 +36628,7 @@ async function getTaggedRelease(owner, repo, tag) {
         owner,
         repo,
         tag
-    })).data;
+    })).data; /** missing digest property */
 }
 async function getMatchingTags(owner, repo, tagPrefix) {
     const octokit = getOctokit();
@@ -79033,7 +79033,7 @@ function requireConfig () {
 var userAgent = {};

-var version = "4.0.5";
+var version = "4.1.0";
 var require$$0$1 = {
     version: version};

@@ -83887,11 +83887,12 @@ function requireCache$1 () {
     constructor() {
         super("github.actions.results.api.v1.CreateCacheEntryResponse", [
             { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
-            { no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+            { no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "message", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
         ]);
     }
     create(value) {
-        const message = { ok: false, signedUploadUrl: "" };
+        const message = { ok: false, signedUploadUrl: "", message: "" };
         globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
         if (value !== undefined)
             (0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -83908,6 +83909,9 @@ function requireCache$1 () {
             case /* string signed_upload_url */ 2:
                 message.signedUploadUrl = reader.string();
                 break;
+            case /* string message */ 3:
+                message.message = reader.string();
+                break;
             default:
                 let u = options.readUnknownField;
                 if (u === "throw")
@@ -83926,6 +83930,9 @@ function requireCache$1 () {
         /* string signed_upload_url = 2; */
         if (message.signedUploadUrl !== "")
             writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);
+        /* string message = 3; */
+        if (message.message !== "")
+            writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message);
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -84009,11 +84016,12 @@ function requireCache$1 () {
     constructor() {
         super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [
             { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
-            { no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
+            { no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
+            { no: 3, name: "message", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
         ]);
     }
     create(value) {
-        const message = { ok: false, entryId: "0" };
+        const message = { ok: false, entryId: "0", message: "" };
         globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
         if (value !== undefined)
             (0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -84030,6 +84038,9 @@ function requireCache$1 () {
             case /* int64 entry_id */ 2:
                 message.entryId = reader.int64().toString();
                 break;
+            case /* string message */ 3:
+                message.message = reader.string();
+                break;
             default:
                 let u = options.readUnknownField;
                 if (u === "throw")
@@ -84048,6 +84059,9 @@ function requireCache$1 () {
         /* int64 entry_id = 2; */
         if (message.entryId !== "0")
             writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId);
+        /* string message = 3; */
+        if (message.message !== "")
+            writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message);
         let u = options.writeUnknownFields;
         if (u !== false)
             (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -84844,7 +84858,7 @@ function requireCache () {
     });
 };
 Object.defineProperty(cache$1, "__esModule", { value: true });
-cache$1.saveCache = cache$1.restoreCache = cache$1.isFeatureAvailable = cache$1.ReserveCacheError = cache$1.ValidationError = void 0;
+cache$1.saveCache = cache$1.restoreCache = cache$1.isFeatureAvailable = cache$1.FinalizeCacheError = cache$1.ReserveCacheError = cache$1.ValidationError = void 0;
 const core = __importStar(requireCore());
 const path = __importStar(require$$0__default$2);
 const utils = __importStar(requireCacheUtils());
@@ -84852,7 +84866,6 @@ function requireCache () {
 const cacheTwirpClient = __importStar(requireCacheTwirpClient());
 const config_1 = requireConfig();
 const tar_1 = requireTar();
 const constants_1 = requireConstants();
 const http_client_1 = requireLib();
 class ValidationError extends Error {
     constructor(message) {
@@ -84870,6 +84883,14 @@ function requireCache () {
     }
 }
 cache$1.ReserveCacheError = ReserveCacheError;
+class FinalizeCacheError extends Error {
+    constructor(message) {
+        super(message);
+        this.name = 'FinalizeCacheError';
+        Object.setPrototypeOf(this, FinalizeCacheError.prototype);
+    }
+}
+cache$1.FinalizeCacheError = FinalizeCacheError;
 function checkPaths(paths) {
     if (!paths || paths.length === 0) {
         throw new ValidationError(`Path Validation Error: At least one directory or file path is required`);
@@ -85246,10 +85267,6 @@ function requireCache () {
     }
     const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
     core.debug(`File Size: ${archiveFileSize}`);
-    // For GHES, this check will take place in ReserveCache API with enterprise file size limit
-    if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) {
-        throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
-    }
     // Set the archive size in the options, will be used to display the upload progress
     options.archiveSizeBytes = archiveFileSize;
     core.debug('Reserving Cache');
@@ -85262,7 +85279,10 @@ function requireCache () {
     try {
         const response = yield twirpClient.CreateCacheEntry(request);
         if (!response.ok) {
-            throw new Error('Response was not ok');
+            if (response.message) {
+                core.warning(`Cache reservation failed: ${response.message}`);
+            }
+            throw new Error(response.message || 'Response was not ok');
         }
         signedUploadUrl = response.signedUploadUrl;
     }
@@ -85280,6 +85300,9 @@ function requireCache () {
     const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest);
     core.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`);
     if (!finalizeResponse.ok) {
+        if (finalizeResponse.message) {
+            throw new FinalizeCacheError(finalizeResponse.message);
+        }
         throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`);
     }
     cacheId = parseInt(finalizeResponse.entryId);
@@ -85292,6 +85315,9 @@ function requireCache () {
     else if (typedError.name === ReserveCacheError.name) {
         core.info(`Failed to save: ${typedError.message}`);
     }
+    else if (typedError.name === FinalizeCacheError.name) {
+        core.warning(typedError.message);
+    }
     else {
         // Log server errors (5xx) as errors, all other errors as warnings
        if (typedError instanceof http_client_1.HttpClientError &&
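Both bundles pick up the same @actions/cache change: CreateCacheEntryResponse and FinalizeCacheEntryUploadResponse gain a string field message (field number 3), and the client surfaces it whenever ok is false. A small sketch of how a caller might consume such a response shape follows; only the field names come from the diff, the interface and helper are illustrative.

// Field names mirror the protobuf message in the diff; the interface itself is a sketch.
interface CreateCacheEntryResponse {
  ok: boolean
  signedUploadUrl: string
  message: string // proto3 default: empty string when the server sent no message
}

// Hypothetical helper: prefer the server-provided message, fall back to a generic one.
function uploadUrlOrThrow(response: CreateCacheEntryResponse): string {
  if (!response.ok) {
    throw new Error(response.message || 'Response was not ok')
  }
  return response.signedUploadUrl
}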
package-lock.json (1078 changed lines; generated)
File diff suppressed because it is too large.
package.json (23 changed lines)

@@ -2,7 +2,7 @@
   "name": "setup-graalvm",
   "author": "GraalVM Community",
   "description": "GitHub Action for GraalVM",
-  "version": "1.4.0",
+  "version": "1.4.1",
   "type": "module",
   "private": true,
   "repository": {
@@ -37,7 +37,7 @@
   },
   "license": "UPL",
   "dependencies": {
-    "@actions/cache": "^4.0.5",
+    "@actions/cache": "^4.1.0",
     "@actions/core": "^1.11.1",
     "@actions/exec": "^1.1.1",
     "@actions/github": "^6.0.1",
@@ -45,24 +45,23 @@
     "@actions/http-client": "^2.2.3",
     "@actions/io": "^1.1.3",
     "@actions/tool-cache": "^2.0.2",
-    "@octokit/types": "^14.1.0",
+    "@octokit/types": "^15.0.0",
     "@github/dependency-submission-toolkit": "^2.0.5",
     "semver": "^7.7.2",
     "uuid": "^13.0.0"
   },
   "devDependencies": {
-    "@eslint/compat": "^1.3.2",
+    "@eslint/compat": "^1.4.0",
     "@rollup/plugin-commonjs": "^28.0.6",
     "@rollup/plugin-json": "^6.1.0",
     "@rollup/plugin-node-resolve": "^16.0.1",
     "@rollup/plugin-typescript": "^12.1.4",
     "@types/jest": "^30.0.0",
-    "@types/node": "^24.4.0",
+    "@types/node": "^24.6.1",
     "@types/semver": "^7.7.1",
     "@types/uuid": "^10.0.0",
-    "@typescript-eslint/eslint-plugin": "^8.43.0",
+    "@typescript-eslint/eslint-plugin": "^8.45.0",
     "@typescript-eslint/parser": "^8.31.1",
-    "eslint": "^9.35.0",
+    "eslint": "^9.36.0",
     "eslint-config-prettier": "^10.1.8",
     "eslint-import-resolver-typescript": "^4.4.4",
     "eslint-plugin-import": "^2.32.0",
@@ -70,15 +69,15 @@
     "eslint-plugin-jsonc": "^2.20.1",
     "eslint-plugin-node": "^11.1.0",
     "eslint-plugin-prettier": "^5.5.4",
-    "jest": "^30.1.3",
+    "jest": "^30.2.0",
     "js-yaml": "^4.1.0",
     "prettier": "^3.6.2",
     "prettier-eslint": "^16.4.2",
-    "rollup": "^4.50.2",
-    "ts-jest": "^29.4.1",
+    "rollup": "^4.52.3",
+    "ts-jest": "^29.4.4",
     "ts-jest-resolver": "^2.0.1",
     "ts-node": "^10.9.2",
-    "typescript": "^5.9.2"
+    "typescript": "^5.9.3"
   },
   "optionalDependencies": {
     "@rollup/rollup-linux-x64-gnu": "*"
@@ -1,6 +1,6 @@
 import * as otypes from '@octokit/types'

-export const ACTION_VERSION = '1.4.0'
+export const ACTION_VERSION = '1.4.1'

 export const INPUT_VERSION = 'version'
 export const INPUT_GDS_TOKEN = 'gds-token'
@@ -53,13 +53,13 @@ export const ERROR_REQUEST = 'Please file an issue at: https://github.com/graalv
 export const ERROR_HINT =
   'If you think this is a mistake, please file an issue at: https://github.com/graalvm/setup-graalvm/issues.'

-export type LatestReleaseResponse = otypes.Endpoints['GET /repos/{owner}/{repo}/releases/latest']['response']
+export type LatestReleaseResponseData =
+  otypes.Endpoints['GET /repos/{owner}/{repo}/releases/latest']['response']['data']

-export type MatchingRefsResponse = otypes.Endpoints['GET /repos/{owner}/{repo}/git/matching-refs/{ref}']['response']
+export type MatchingRefsResponseData =
+  otypes.Endpoints['GET /repos/{owner}/{repo}/git/matching-refs/{ref}']['response']['data']

 export type ReleasesResponse = otypes.Endpoints['GET /repos/{owner}/{repo}/releases']['response']

-export type ContentsResponse = otypes.Endpoints['GET /repos/{owner}/{repo}/contents/{path}']['response']
+export type ContentsResponseData = otypes.Endpoints['GET /repos/{owner}/{repo}/contents/{path}']['response']['data']

 export interface OracleGraalVMEAFile {
   filename: string
@@ -174,7 +174,7 @@ export async function setUpGraalVMJDKDevBuild(): Promise<string> {
   return downloadAndExtractJDK(downloadUrl)
 }

-export function findHighestJavaVersion(release: c.LatestReleaseResponse['data'], version: string): string {
+export function findHighestJavaVersion(release: c.LatestReleaseResponseData, version: string): string {
   const graalVMIdentifierPattern = determineGraalVMLegacyIdentifier(false, version, '(\\d+)')
   const expectedFileNameRegExp = new RegExp(
     `^${graalVMIdentifierPattern}${c.GRAALVM_FILE_EXTENSION.replace(/\./g, '\\.')}$`
@@ -214,7 +214,7 @@ export async function setUpGraalVMLatest_22_X(gdsToken: string, javaVersion: str
   return setUpGraalVMRelease(gdsToken, version, javaVersion)
 }

-export function findGraalVMVersion(release: c.LatestReleaseResponse['data']) {
+export function findGraalVMVersion(release: c.LatestReleaseResponseData) {
   const tag_name = release.tag_name
   if (!tag_name.startsWith(GRAALVM_TAG_PREFIX)) {
     throw new Error(`Could not find latest GraalVM release: ${tag_name}`)
@@ -234,7 +234,7 @@ export async function setUpGraalVMRelease(gdsToken: string, version: string, jav
   return downloadExtractAndCacheJDK(downloader, toolName, version)
 }

-function findDownloadUrl(release: c.LatestReleaseResponse['data'], javaVersion: string): string {
+function findDownloadUrl(release: c.LatestReleaseResponseData, javaVersion: string): string {
   const graalVMIdentifier = determineGraalVMLegacyIdentifier(false, c.VERSION_DEV, javaVersion)
   const expectedFileName = `${graalVMIdentifier}${c.GRAALVM_FILE_EXTENSION}`
   for (const asset of release.assets) {
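The exported aliases now bake the ['data'] indexed access into their names, so call sites refer to c.LatestReleaseResponseData rather than c.LatestReleaseResponse['data']. A minimal standalone sketch of the pattern, assuming @octokit/types is available; the alias name matches the diff, while the example function is illustrative only.

import type { Endpoints } from '@octokit/types'

// Indexed-access alias: the response payload type of the "latest release" endpoint.
type LatestReleaseResponseData =
  Endpoints['GET /repos/{owner}/{repo}/releases/latest']['response']['data']

// Call sites can then name the payload type directly, e.g.:
function releaseTag(release: LatestReleaseResponseData): string {
  return release.tag_name
}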
src/utils.ts (16 changed lines)

@@ -18,17 +18,17 @@ export async function exec(commandLine: string, args?: string[], options?: ExecO
   }
 }

-export async function getLatestRelease(repo: string): Promise<c.LatestReleaseResponse['data']> {
+export async function getLatestRelease(repo: string): Promise<c.LatestReleaseResponseData> {
   const octokit = getOctokit()
   return (
     await octokit.request('GET /repos/{owner}/{repo}/releases/latest', {
       owner: c.GRAALVM_GH_USER,
       repo
     })
-  ).data
+  ).data as c.LatestReleaseResponseData /** missing digest property */
 }

-export async function getContents(repo: string, path: string): Promise<c.ContentsResponse['data']> {
+export async function getContents(repo: string, path: string): Promise<c.ContentsResponseData> {
   const octokit = getOctokit()
   return (
     await octokit.request('GET /repos/{owner}/{repo}/contents/{path}', {
@@ -39,11 +39,7 @@ export async function getContents(repo: string, path: string): Promise<c.Content
   ).data
 }

-export async function getTaggedRelease(
-  owner: string,
-  repo: string,
-  tag: string
-): Promise<c.LatestReleaseResponse['data']> {
+export async function getTaggedRelease(owner: string, repo: string, tag: string): Promise<c.LatestReleaseResponseData> {
   const octokit = getOctokit()
   return (
     await octokit.request('GET /repos/{owner}/{repo}/releases/tags/{tag}', {
@@ -51,14 +47,14 @@ export async function getTaggedRelease(
       repo,
       tag
     })
-  ).data
+  ).data as c.LatestReleaseResponseData /** missing digest property */
 }

 export async function getMatchingTags(
   owner: string,
   repo: string,
   tagPrefix: string
-): Promise<c.MatchingRefsResponse['data']> {
+): Promise<c.MatchingRefsResponseData> {
   const octokit = getOctokit()
   return (
     await octokit.request('GET /repos/{owner}/{repo}/git/matching-refs/tags/{tagPrefix}', {
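The casts in getLatestRelease and getTaggedRelease pin the returned .data to the new alias; the inline /** missing digest property */ comments suggest the Octokit client's own response typing and the @octokit/types 15 alias disagree on a digest field, so the value is asserted to the exported type. A rough standalone sketch of the same pattern, assuming @actions/github and @octokit/types; the function below is illustrative and is not the action's own helper.

import * as github from '@actions/github'
import type { Endpoints } from '@octokit/types'

type LatestReleaseResponseData =
  Endpoints['GET /repos/{owner}/{repo}/releases/latest']['response']['data']

// Hypothetical standalone version of the cast pattern: request the latest release
// and assert the payload to the alias when the two type sources drift apart.
async function getLatestRelease(token: string, owner: string, repo: string): Promise<LatestReleaseResponseData> {
  const octokit = github.getOctokit(token)
  return (
    await octokit.request('GET /repos/{owner}/{repo}/releases/latest', { owner, repo })
  ).data as LatestReleaseResponseData
}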